{
"title": "XGBoost Regression Mastery: 100 MCQs",
"description": "A complete set of 100 multiple-choice questions covering XGBoost Regression \u2014 from basics to advanced tuning and scenario-based problem solving.",
"questions": [
{
"id": 1,
"questionText": "What does XGBoost primarily stand for in machine learning?",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 2,
"questionText": "What does XGBoost primarily stand for in machine learning?",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 3,
"questionText": "What does XGBoost primarily stand for in machine learning?",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 4,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q4)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 5,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q5)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 6,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q6)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 7,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q7)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 8,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q8)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 9,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q9)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 10,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q10)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 11,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q11)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 12,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q12)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 13,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q13)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 14,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q14)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 15,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q15)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 16,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q16)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 17,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q17)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 18,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q18)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 19,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q19)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 20,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q20)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 21,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q21)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 22,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q22)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 23,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q23)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 24,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q24)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 25,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q25)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 26,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q26)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 27,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q27)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 28,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q28)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 29,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q29)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 30,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q30)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 31,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q31)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 32,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q32)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 33,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q33)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 34,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q34)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 35,
"questionText": "What does XGBoost primarily stand for in machine learning (Easy Q35)",
"options": [
"Extreme Gradient Boosting",
"Extra Gaussian Boost",
"Extended Gradient Binary Output",
"Exponential Gain Booster"
],
"correctAnswerIndex": 0,
"explanation": "XGBoost stands for Extreme Gradient Boosting, a highly efficient and scalable implementation of gradient boosted decision trees."
},
{
"id": 36,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q36)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 37,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q37)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 38,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q38)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 39,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q39)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 40,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q40)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 41,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q41)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 42,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q42)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 43,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q43)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 44,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q44)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 45,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q45)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 46,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q46)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 47,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q47)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 48,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q48)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 49,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q49)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 50,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q50)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 51,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q51)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 52,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q52)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 53,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q53)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 54,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q54)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 55,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q55)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 56,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q56)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 57,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q57)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 58,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q58)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 59,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q59)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 60,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q60)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 61,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q61)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 62,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q62)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 63,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q63)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 64,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q64)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 65,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q65)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 66,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q66)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 67,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q67)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 68,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q68)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 69,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q69)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 70,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q70)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 71,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q71)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 72,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q72)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 73,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q73)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 74,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q74)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 75,
"questionText": "In XGBoost Regression, what is the purpose of the learning rate (eta) (Medium Q75)",
"options": [
"It controls the depth of trees.",
"It prevents overfitting by reducing feature importance.",
"It scales the contribution of each tree during training.",
"It increases the randomness in sampling data."
],
"correctAnswerIndex": 2,
"explanation": "The learning rate (eta) determines how much each new tree contributes to the overall model. Smaller values make learning slower but often improve generalization."
},
{
"id": 76,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q76)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 77,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q77)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 78,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q78)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 79,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q79)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 80,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q80)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 81,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q81)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 82,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q82)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 83,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q83)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 84,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q84)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 85,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q85)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 86,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q86)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 87,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q87)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 88,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q88)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 89,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q89)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 90,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q90)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 91,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q91)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 92,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q92)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 93,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q93)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 94,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q94)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 95,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q95)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 96,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q96)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 97,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q97)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 98,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q98)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 99,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q99)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
},
{
"id": 100,
"questionText": "Scenario: A data scientist is using XGBoost Regression to predict house prices. She observes the model overfits despite tuning 'max_depth' and 'learning_rate'. Which additional parameter might help reduce overfitting (Hard Q100)",
"options": [
"Increase subsample and colsample_bytree values",
"Reduce regularization parameters lambda and alpha",
"Add more trees to the ensemble",
"Set booster to 'gblinear'"
],
"correctAnswerIndex": 0,
"explanation": "Increasing subsample and colsample_bytree introduces randomness by sampling data and features for each tree, helping prevent overfitting."
}
]
}