Spaces:
Build error
Build error
Validify-testbot-1 / botbuilder-python / libraries/botbuilder-ai/tests/luis/test_data/ExternalEntitiesAndComposite_v3.json
{
  "entities": {
    "$instance": {
      "Address": [
        {
          "endIndex": 13,
          "modelType": "Composite Entity Extractor",
          "recognitionSources": [
            "model"
          ],
          "score": 0.7160641,
          "startIndex": 8,
          "text": "35 WA",
          "type": "Address"
        },
        {
          "endIndex": 33,
          "modelType": "Composite Entity Extractor",
          "recognitionSources": [
            "externalEntities"
          ],
          "startIndex": 17,
          "text": "repent harelquin",
          "type": "Address"
        }
      ]
    },
    "Address": [
      {
        "$instance": {
          "number": [
            {
              "endIndex": 10,
              "modelType": "Prebuilt Entity Extractor",
              "recognitionSources": [
                "model"
              ],
              "startIndex": 8,
              "text": "35",
              "type": "builtin.number"
            }
          ],
          "State": [
            {
              "endIndex": 13,
              "modelType": "Entity Extractor",
              "recognitionSources": [
                "model"
              ],
              "score": 0.614376,
              "startIndex": 11,
              "text": "WA",
              "type": "State"
            }
          ]
        },
        "number": [
          35
        ],
        "State": [
          "WA"
        ]
      },
      {
        "number": [
          3
        ],
        "State": [
          "France"
        ]
      }
    ]
  },
  "intents": {
    "Cancel": {
      "score": 0.00325984019
    },
    "Delivery": {
      "score": 0.482009649
    },
    "EntityTests": {
      "score": 0.00372873852
    },
    "Greeting": {
      "score": 0.00283122621
    },
    "Help": {
      "score": 0.00292110164
    },
    "None": {
      "score": 0.0208108239
    },
    "Roles": {
      "score": 0.069060266
    },
    "search": {
      "score": 0.009682492
    },
    "SpecifyName": {
      "score": 0.00586992875
    },
    "Travel": {
      "score": 0.007831623
    },
    "Weather_GetForecast": {
      "score": 0.009580207
    }
  },
  "sentiment": {
    "label": "neutral",
    "score": 0.5
  },
  "text": "deliver 35 WA to repent harelquin",
  "v3": {
    "options": {
      "externalEntities": [
        {
          "entityLength": 16,
          "entityName": "Address",
          "resolution": {
            "number": [
              3
            ],
            "State": [
              "France"
            ]
          },
          "startIndex": 17
        }
      ],
      "includeAllIntents": true,
      "includeAPIResults": true,
      "includeInstanceData": true,
      "log": true,
      "preferExternalEntities": true,
      "slot": "production"
    },
    "response": {
      "prediction": {
        "entities": {
          "$instance": {
            "Address": [
              {
                "length": 5,
                "modelType": "Composite Entity Extractor",
                "modelTypeId": 4,
                "recognitionSources": [
                  "model"
                ],
                "score": 0.7160641,
                "startIndex": 8,
                "text": "35 WA",
                "type": "Address"
              },
              {
                "length": 16,
                "modelType": "Composite Entity Extractor",
                "modelTypeId": 4,
                "recognitionSources": [
                  "externalEntities"
                ],
                "startIndex": 17,
                "text": "repent harelquin",
                "type": "Address"
              }
            ]
          },
          "Address": [
            {
              "$instance": {
                "number": [
                  {
                    "length": 2,
                    "modelType": "Prebuilt Entity Extractor",
                    "modelTypeId": 2,
                    "recognitionSources": [
                      "model"
                    ],
                    "startIndex": 8,
                    "text": "35",
                    "type": "builtin.number"
                  }
                ],
                "State": [
                  {
                    "length": 2,
                    "modelType": "Entity Extractor",
                    "modelTypeId": 1,
                    "recognitionSources": [
                      "model"
                    ],
                    "score": 0.614376,
                    "startIndex": 11,
                    "text": "WA",
                    "type": "State"
                  }
                ]
              },
              "number": [
                35
              ],
              "State": [
                "WA"
              ]
            },
            {
              "number": [
                3
              ],
              "State": [
                "France"
              ]
            }
          ]
        },
        "intents": {
          "Cancel": {
            "score": 0.00325984019
          },
          "Delivery": {
            "score": 0.482009649
          },
          "EntityTests": {
            "score": 0.00372873852
          },
          "Greeting": {
            "score": 0.00283122621
          },
          "Help": {
            "score": 0.00292110164
          },
          "None": {
            "score": 0.0208108239
          },
          "Roles": {
            "score": 0.069060266
          },
          "search": {
            "score": 0.009682492
          },
          "SpecifyName": {
            "score": 0.00586992875
          },
          "Travel": {
            "score": 0.007831623
          },
          "Weather.GetForecast": {
            "score": 0.009580207
          }
        },
        "normalizedQuery": "deliver 35 wa to repent harelquin",
        "sentiment": {
          "label": "neutral",
          "score": 0.5
        },
        "topIntent": "Delivery"
      },
      "query": "deliver 35 WA to repent harelquin"
    }
  }
}