| { |
| "title": "Decision Trees Mastery: 100 MCQs", |
| "description": "A comprehensive 100-question collection to master Decision Trees — covering fundamentals, splitting criteria, pruning, overfitting control, ensemble integration, and real-world scenarios.", |
| "questions": [ |
| { |
| "id": 1, |
| "questionText": "What is the main purpose of Decision Tree in classification task 1?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 2, |
| "questionText": "What is the main purpose of Decision Tree in classification task 2?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 3, |
| "questionText": "What is the main purpose of Decision Tree in classification task 3?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 4, |
| "questionText": "What is the main purpose of Decision Tree in classification task 4?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 5, |
| "questionText": "What is the main purpose of Decision Tree in classification task 5?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 6, |
| "questionText": "What is the main purpose of Decision Tree in classification task 6?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 7, |
| "questionText": "What is the main purpose of Decision Tree in classification task 7?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 8, |
| "questionText": "What is the main purpose of Decision Tree in classification task 8?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 9, |
| "questionText": "What is the main purpose of Decision Tree in classification task 9?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 10, |
| "questionText": "What is the main purpose of Decision Tree in classification task 10?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 11, |
| "questionText": "What is the main purpose of Decision Tree in classification task 11?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 12, |
| "questionText": "What is the main purpose of Decision Tree in classification task 12?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 13, |
| "questionText": "What is the main purpose of Decision Tree in classification task 13?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 14, |
| "questionText": "What is the main purpose of Decision Tree in classification task 14?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 15, |
| "questionText": "What is the main purpose of Decision Tree in classification task 15?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 16, |
| "questionText": "What is the main purpose of Decision Tree in classification task 16?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 17, |
| "questionText": "What is the main purpose of Decision Tree in classification task 17?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 18, |
| "questionText": "What is the main purpose of Decision Tree in classification task 18?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 19, |
| "questionText": "What is the main purpose of Decision Tree in classification task 19?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 20, |
| "questionText": "What is the main purpose of Decision Tree in classification task 20?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 21, |
| "questionText": "What is the main purpose of Decision Tree in classification task 21?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 22, |
| "questionText": "What is the main purpose of Decision Tree in classification task 22?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 23, |
| "questionText": "What is the main purpose of Decision Tree in classification task 23?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 24, |
| "questionText": "What is the main purpose of Decision Tree in classification task 24?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 25, |
| "questionText": "What is the main purpose of Decision Tree in classification task 25?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 26, |
| "questionText": "What is the main purpose of Decision Tree in classification task 26?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 27, |
| "questionText": "What is the main purpose of Decision Tree in classification task 27?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 28, |
| "questionText": "What is the main purpose of Decision Tree in classification task 28?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 29, |
| "questionText": "What is the main purpose of Decision Tree in classification task 29?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 30, |
| "questionText": "What is the main purpose of Decision Tree in classification task 30?", |
| "options": [ |
| "To predict class labels", |
| "To cluster data", |
| "To reduce dimensions", |
| "To normalize data" |
| ], |
| "correctAnswerIndex": 0, |
| "explanation": "Decision Trees are used to predict class labels based on input features." |
| }, |
| { |
| "id": 31, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 32, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 33, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 34, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 35, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 36, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 37, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 38, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 39, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 40, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 41, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 42, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 43, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 44, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 45, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 46, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 47, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 48, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 49, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 50, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 51, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 52, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 53, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 54, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 55, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 56, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 57, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 58, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 59, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 60, |
| "questionText": "Scenario: A Decision Tree is overfitting the training data. What should you do?", |
| "options": [ |
| "Increase tree depth", |
| "Prune the tree", |
| "Add more features", |
| "Decrease learning rate" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Pruning helps reduce overfitting by removing unnecessary branches." |
| }, |
| { |
| "id": 61, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 62, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 63, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 64, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 65, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 66, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 67, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 68, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 69, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 70, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 71, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 72, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 73, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 74, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 75, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 76, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 77, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 78, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 79, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 80, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 81, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 82, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 83, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 84, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 85, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 86, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 87, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 88, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 89, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 90, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 91, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 92, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 93, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 94, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 95, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 96, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 97, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 1, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 98, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 99, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| }, |
| { |
| "id": 100, |
| "questionText": "Scenario: You are building a Decision Tree on a dataset with continuous features and high variance. What splitting criterion might perform best?", |
| "options": [ |
| "Entropy", |
| "Information Gain", |
| "Gini Index", |
| "Chi-Square" |
| ], |
| "correctAnswerIndex": 2, |
| "explanation": "Gini Index or Information Gain are common criteria; the best depends on data distribution, but both handle continuous attributes effectively." |
| } |
| ] |
| } |