{ "nbformat": 4, "nbformat_minor": 0, "metadata": { "colab": { "provenance": [], "machine_shape": "hm", "gpuType": "L4", "collapsed_sections": [ "MP8TLCP7QWcl", "yRdDaLDiQC89", "K_Wll94vQFdH", "OJ3rAq90SUio" ] }, "kernelspec": { "name": "python3", "display_name": "Python 3" }, "language_info": { "name": "python" }, "accelerator": "GPU", "widgets": { "application/vnd.jupyter.widget-state+json": { "a824c569d05747f0b97baef1bc7667db": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_c4ef0c45b6334bb08bd79534056fd2ba", "IPY_MODEL_edb63a98fc79464b9bf7e32b0371133c", "IPY_MODEL_0bfff8686207433d877ba2b89b9f3651" ], "layout": "IPY_MODEL_d144fa9116f5401aa3b4e95c43699edd" } }, "c4ef0c45b6334bb08bd79534056fd2ba": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_d8762155a1514783b42f8bdb14e143df", "placeholder": "​", "style": "IPY_MODEL_5ce5f95d8d054118a8decf6e98eb9c8f", "value": "Resolving data files: 100%" } }, "edb63a98fc79464b9bf7e32b0371133c": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, 
"_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_83b6ac6b6fbb453190c88afc083746af", "max": 66, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_c14c2ebf72844828a5c4480c531bc97d", "value": 66 } }, "0bfff8686207433d877ba2b89b9f3651": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_12c242032efc43dbb14b71dfab5d6151", "placeholder": "​", "style": "IPY_MODEL_b356594746ab4986ab16a6eb56f4bfa8", "value": " 66/66 [00:00<00:00, 5930.25it/s]" } }, "d144fa9116f5401aa3b4e95c43699edd": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, 
"overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "d8762155a1514783b42f8bdb14e143df": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "5ce5f95d8d054118a8decf6e98eb9c8f": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "83b6ac6b6fbb453190c88afc083746af": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, 
"_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "c14c2ebf72844828a5c4480c531bc97d": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "12c242032efc43dbb14b71dfab5d6151": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, 
"grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "b356594746ab4986ab16a6eb56f4bfa8": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "7d0bb0ba501844ae8c02b465bb32e34b": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_becd12fb40eb4bb09a08255dbe0274e0", "IPY_MODEL_994708116d614ed78c330a1ad765acee", "IPY_MODEL_7acf229a9b70471fae35644098646bbf" ], "layout": "IPY_MODEL_557b3e8e959e41d187af9ee37e426421" } }, "becd12fb40eb4bb09a08255dbe0274e0": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, 
"layout": "IPY_MODEL_d785c61c21e245d4bdc8a9bc952400ca", "placeholder": "​", "style": "IPY_MODEL_75742c0449be4e6aae7e56e589e45ac2", "value": "Resolving data files: 100%" } }, "994708116d614ed78c330a1ad765acee": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_7c4e156100704ce3bffbb17655f74bba", "max": 65, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_50f5f99b993a4d8d9ec4137218e5c904", "value": 65 } }, "7acf229a9b70471fae35644098646bbf": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_7a243d14bbf545f8aac4137b7c4615f7", "placeholder": "​", "style": "IPY_MODEL_85b52b98c3fc491eae517d49f4fc819b", "value": " 65/65 [00:00<00:00, 7106.03it/s]" } }, "557b3e8e959e41d187af9ee37e426421": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, 
"flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "d785c61c21e245d4bdc8a9bc952400ca": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "75742c0449be4e6aae7e56e589e45ac2": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", 
"_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "7c4e156100704ce3bffbb17655f74bba": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "50f5f99b993a4d8d9ec4137218e5c904": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "7a243d14bbf545f8aac4137b7c4615f7": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": 
"@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "85b52b98c3fc491eae517d49f4fc819b": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "d5da1dd9be2b4cbfad8548fda97da2b9": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_2095591243aa4fbbb2630de2d7b9c17e", "IPY_MODEL_afa0b24df2944a21ab49876f66da9178", "IPY_MODEL_98150018b6b84fc6ade2b02b73c5eebc" ], "layout": 
"IPY_MODEL_548643ef1a0e48f08aba06fce887212c" } }, "2095591243aa4fbbb2630de2d7b9c17e": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_caa91bf2983845d487cbb6730a2f3331", "placeholder": "​", "style": "IPY_MODEL_ed9bf7e941b9435391c51b58eb960110", "value": "Loading dataset shards: 100%" } }, "afa0b24df2944a21ab49876f66da9178": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_2447e20093bf48adaa18229aef5a1864", "max": 33, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_6baa7af909904d57b98e9a41f613ab2a", "value": 33 } }, "98150018b6b84fc6ade2b02b73c5eebc": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_c11b2bbb7f364ea69a0efea9d939c2f9", "placeholder": "​", "style": "IPY_MODEL_b4e89d997855457195890c48b7e3424a", "value": " 33/33 [00:00<00:00, 1822.84it/s]" } }, 
"548643ef1a0e48f08aba06fce887212c": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "caa91bf2983845d487cbb6730a2f3331": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, 
"max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "ed9bf7e941b9435391c51b58eb960110": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "2447e20093bf48adaa18229aef5a1864": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "6baa7af909904d57b98e9a41f613ab2a": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": 
"1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "c11b2bbb7f364ea69a0efea9d939c2f9": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "b4e89d997855457195890c48b7e3424a": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "48bef5b490234f2baf14db6bddf12f08": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", 
"model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_fbba1ac1b514479085cd8e0b1886803f", "IPY_MODEL_6ceb7e253498490aa71689db9b642154", "IPY_MODEL_10df888e325e460f9d19338d0dd6d7fb" ], "layout": "IPY_MODEL_f33efb79e8ae4659be7e6750c86061d0" } }, "fbba1ac1b514479085cd8e0b1886803f": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_c8cf9c087bcb45c18ed374a5d6a2a290", "placeholder": "​", "style": "IPY_MODEL_4fc84e32901446f9b698c80a5a3ebc4c", "value": "Processing Files (1 / 1)      : 100%" } }, "6ceb7e253498490aa71689db9b642154": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_4c8d79638cc54f60af561d467c544551", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_d3b3c59df7aa44559fabc49966d0e608", "value": 1 } }, "10df888e325e460f9d19338d0dd6d7fb": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { 
"_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_940bb1507e51412bb00a9b21fb454c45", "placeholder": "​", "style": "IPY_MODEL_6f5a55718cd94140b30b0eafb5d81fb7", "value": " 22.1MB / 22.1MB, 12.3MB/s  " } }, "f33efb79e8ae4659be7e6750c86061d0": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "c8cf9c087bcb45c18ed374a5d6a2a290": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", 
"align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "4fc84e32901446f9b698c80a5a3ebc4c": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "4c8d79638cc54f60af561d467c544551": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": 
null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "d3b3c59df7aa44559fabc49966d0e608": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "940bb1507e51412bb00a9b21fb454c45": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "6f5a55718cd94140b30b0eafb5d81fb7": { "model_module": 
"@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "1c33bbf6f5a3443d8626b364ffe53900": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_de020a89a80946d1a9fe3df2fea1a93e", "IPY_MODEL_01ec5af06ec14f2395a63820a7babd38", "IPY_MODEL_bbe58f20eb5a468c9aa8d7809cbbcaba" ], "layout": "IPY_MODEL_035a42d61ecb4016a496b5e91056c6f6" } }, "de020a89a80946d1a9fe3df2fea1a93e": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_1f261cc758404223a4de300390278582", "placeholder": "​", "style": "IPY_MODEL_a8c870726ef549e9b8d2084540c7a43c", "value": "New Data Upload               : 100%" } }, "01ec5af06ec14f2395a63820a7babd38": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": 
"@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_16bad21b7fa042108320272027ad41e7", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_940dac4af4664b74a29dc00de3bd059b", "value": 1 } }, "bbe58f20eb5a468c9aa8d7809cbbcaba": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_3f3fdae451974050bfc16b981a85a4a4", "placeholder": "​", "style": "IPY_MODEL_d803cf137a6d42ecbfb581f624d13008", "value": " 22.1MB / 22.1MB, 12.3MB/s  " } }, "035a42d61ecb4016a496b5e91056c6f6": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, 
"overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "1f261cc758404223a4de300390278582": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "a8c870726ef549e9b8d2084540c7a43c": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "16bad21b7fa042108320272027ad41e7": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": 
"@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "940dac4af4664b74a29dc00de3bd059b": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "3f3fdae451974050bfc16b981a85a4a4": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": 
null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "d803cf137a6d42ecbfb581f624d13008": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "2559eb52f664442ca72c7e41570c6f8d": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_b2a624c1929247908413ac2e63cd1eaa", "IPY_MODEL_145376c330ab4f4eb42731cfbe8e4e51", "IPY_MODEL_721c8ab28ceb4c9f9b05f8b0c6e3b9b8" ], "layout": "IPY_MODEL_6b3414baf1b24d2db751e3fb830a5793" } }, "b2a624c1929247908413ac2e63cd1eaa": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": 
"IPY_MODEL_834da2d31852453bac1c3e3302e4bd5f", "placeholder": "​", "style": "IPY_MODEL_38f488b1b76542628e99fb8d571c338a", "value": "  ...to/text_vae/best_model.pt: 100%" } }, "145376c330ab4f4eb42731cfbe8e4e51": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_dd811a3fab4342928f81b86d0bb049d5", "max": 22087733, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_64d458a8ca124bd3b3b35be00156db19", "value": 22087733 } }, "721c8ab28ceb4c9f9b05f8b0c6e3b9b8": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_9e2a862615914bd9a0436ca21db57158", "placeholder": "​", "style": "IPY_MODEL_4b01ab4560f44c109d23f9d6fe6eeb00", "value": " 22.1MB / 22.1MB            " } }, "6b3414baf1b24d2db751e3fb830a5793": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": 
null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "834da2d31852453bac1c3e3302e4bd5f": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "38f488b1b76542628e99fb8d571c338a": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", 
"_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "dd811a3fab4342928f81b86d0bb049d5": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "64d458a8ca124bd3b3b35be00156db19": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "9e2a862615914bd9a0436ca21db57158": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": 
"@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "4b01ab4560f44c109d23f9d6fe6eeb00": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "a8bf7449cde0439286bdbd6a190ddf12": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_eca1bf0850cc43adbe16d580354bd54c", "IPY_MODEL_899af0a82d7d41daa5758066ec06be94", "IPY_MODEL_7102935e1f3a4ce6a779315b6d154ecc" ], "layout": 
"IPY_MODEL_b6581848560e439a8dc563ed3f982e39" } }, "eca1bf0850cc43adbe16d580354bd54c": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_c4fad0572fe84d9c9f1b6e610916a410", "placeholder": "​", "style": "IPY_MODEL_08651736f81b4e15be2ddcd2d6655531", "value": "Processing Files (1 / 1)      : 100%" } }, "899af0a82d7d41daa5758066ec06be94": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_67ed4bc132184c3ab720be400ff3e00a", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_171f461d7529424a8cd7270c2a0434b7", "value": 1 } }, "7102935e1f3a4ce6a779315b6d154ecc": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_83d442b93e964481adadb0133838fd41", "placeholder": "​", "style": "IPY_MODEL_166d7715822c43f497715d76461ea1f2", "value": " 2.44MB / 2.44MB, 30.9kB/s  " } }, 
"b6581848560e439a8dc563ed3f982e39": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "c4fad0572fe84d9c9f1b6e610916a410": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, 
"max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "08651736f81b4e15be2ddcd2d6655531": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "67ed4bc132184c3ab720be400ff3e00a": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "171f461d7529424a8cd7270c2a0434b7": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", 
"model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "83d442b93e964481adadb0133838fd41": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "166d7715822c43f497715d76461ea1f2": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "03b59b9059e1415f96f0880aeda6b4f2": { "model_module": "@jupyter-widgets/controls", 
"model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_a13b2f1080344f22b69d7d0263225df9", "IPY_MODEL_5ae9fb060a1e47aeba36f0b2c1d5fd24", "IPY_MODEL_5effcd91c27e434a9259a17453b16c10" ], "layout": "IPY_MODEL_75630b3146ae402eb819808b2b10312d" } }, "a13b2f1080344f22b69d7d0263225df9": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_1ee3bc6aae2943688f94447c7352d862", "placeholder": "​", "style": "IPY_MODEL_702050e89c3d4860b4fc25b3b716c926", "value": "New Data Upload               : 100%" } }, "5ae9fb060a1e47aeba36f0b2c1d5fd24": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_ce0940198b984894bc47020cd6a87c33", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_21f6e01a2b9f4f2abf5ba0f8f05e4ea0", "value": 1 } }, "5effcd91c27e434a9259a17453b16c10": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", 
"model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_3805b7b343164f52a05e8274fae8eccc", "placeholder": "​", "style": "IPY_MODEL_93c03ab91bee423db2c9e2d612b74e2a", "value": " 2.44MB / 2.44MB, 30.9kB/s  " } }, "75630b3146ae402eb819808b2b10312d": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "1ee3bc6aae2943688f94447c7352d862": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", 
"_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "702050e89c3d4860b4fc25b3b716c926": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "ce0940198b984894bc47020cd6a87c33": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, 
"grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "21f6e01a2b9f4f2abf5ba0f8f05e4ea0": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "3805b7b343164f52a05e8274fae8eccc": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, 
"93c03ab91bee423db2c9e2d612b74e2a": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "edfc262bdd654caea5673fbd28336495": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_8bc2f5185d2e453b8e2d7533bab86e89", "IPY_MODEL_4ff2347af4bb40d9ad9606d7e56cd557", "IPY_MODEL_44177f7b1770473990f7406f91150df2" ], "layout": "IPY_MODEL_1beeda47adf2488db048a323cc5ac01f" } }, "8bc2f5185d2e453b8e2d7533bab86e89": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_227526740533437ea1518d02c5247b1a", "placeholder": "​", "style": "IPY_MODEL_4a11361380794c3d8322803064fbb7f5", "value": "  ...roto/text_vae/val_data.pt: 100%" } }, "4ff2347af4bb40d9ad9606d7e56cd557": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": 
"FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_e7774f68762341b4bb343be9773710f5", "max": 2442285, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_53e8b32982e344c59116da8be8388fd2", "value": 2442285 } }, "44177f7b1770473990f7406f91150df2": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_7af22f4527c04bf7bfa36fb3a0577355", "placeholder": "​", "style": "IPY_MODEL_b38cd95a71bc49b28bcc3c6f4d3a983e", "value": " 2.44MB / 2.44MB            " } }, "1beeda47adf2488db048a323cc5ac01f": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, 
"object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "227526740533437ea1518d02c5247b1a": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "4a11361380794c3d8322803064fbb7f5": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "e7774f68762341b4bb343be9773710f5": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", 
"_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "53e8b32982e344c59116da8be8388fd2": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "7af22f4527c04bf7bfa36fb3a0577355": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": 
null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "b38cd95a71bc49b28bcc3c6f4d3a983e": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "7bbfae1c4c40413aa086829c6cf0261e": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_b84915cf8a054e2bb390569139e34064", "IPY_MODEL_bd0043231e734ab394e06f9e713222a9", "IPY_MODEL_5b95cd580dc54815b622a0536d70a2a8" ], "layout": "IPY_MODEL_8c9810cd315a4d87abf63dbece4e3594" } }, "b84915cf8a054e2bb390569139e34064": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", 
"description": "", "description_tooltip": null, "layout": "IPY_MODEL_426046ecd2a049708fa7352a1c56609e", "placeholder": "​", "style": "IPY_MODEL_7092903961a24ec3a015c7afbea6be12", "value": "Processing Files (1 / 1)      : 100%" } }, "bd0043231e734ab394e06f9e713222a9": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_891c668ba18944f38f09752ff43ca694", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_60cf7b30570e477a9ec98d142465ce94", "value": 1 } }, "5b95cd580dc54815b622a0536d70a2a8": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_af7396d34dbd4344a03db8b3ee632301", "placeholder": "​", "style": "IPY_MODEL_8f4f8f3aa31e46ecaa1eafa19e1cf568", "value": " 76.9MB / 76.9MB, 25.6MB/s  " } }, "8c9810cd315a4d87abf63dbece4e3594": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": 
null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "426046ecd2a049708fa7352a1c56609e": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "7092903961a24ec3a015c7afbea6be12": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { 
"_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "891c668ba18944f38f09752ff43ca694": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "60cf7b30570e477a9ec98d142465ce94": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "af7396d34dbd4344a03db8b3ee632301": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": 
"1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "8f4f8f3aa31e46ecaa1eafa19e1cf568": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "2bd4a90a658241b396ea80f786cf85f1": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_ccd378b0c2c84f7ca711c1e6db5111d3", "IPY_MODEL_8b744269b2d94d9eb594f1ae3121056e", 
"IPY_MODEL_a8143ec35c084e679714d8aa2f5a65bb" ], "layout": "IPY_MODEL_b70351c8dc1a4d549ca4f2c24662ad23" } }, "ccd378b0c2c84f7ca711c1e6db5111d3": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_b7e8170d303a4b1787fd8bfc15595c25", "placeholder": "​", "style": "IPY_MODEL_ec31080dc1b84d0ea4888ea50fb2e296", "value": "New Data Upload               : 100%" } }, "8b744269b2d94d9eb594f1ae3121056e": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_66d0c3cb6ebb4bbfa2f0f64ee71f3c80", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_76fe536aa1fc42c2b75fcf381228af9c", "value": 1 } }, "a8143ec35c084e679714d8aa2f5a65bb": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_aee04183d5f944d08bdcb47182dcb559", "placeholder": "​", "style": 
"IPY_MODEL_8e76e599205548ac94769bda31d96dd3", "value": " 76.9MB / 76.9MB, 25.6MB/s  " } }, "b70351c8dc1a4d549ca4f2c24662ad23": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "b7e8170d303a4b1787fd8bfc15595c25": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, 
"height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "ec31080dc1b84d0ea4888ea50fb2e296": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "66d0c3cb6ebb4bbfa2f0f64ee71f3c80": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "76fe536aa1fc42c2b75fcf381228af9c": { 
"model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "aee04183d5f944d08bdcb47182dcb559": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "8e76e599205548ac94769bda31d96dd3": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, 
"8d478c17e97e4f1e9e71e60808496e28": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_26065463bd2242cea0bb2e2186253a8e", "IPY_MODEL_52c3117a3915465e9d32080aa81133d2", "IPY_MODEL_7810dd9e5a7345ccbbce7f5f842257c5" ], "layout": "IPY_MODEL_75391687beba44fcb865e94826c18448" } }, "26065463bd2242cea0bb2e2186253a8e": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_5befde6cd9be4979aae7a206566489f0", "placeholder": "​", "style": "IPY_MODEL_4f18c1dc8bbe44f88c5e45cf011d9b8a", "value": "  ...ext_geometric_features.pt: 100%" } }, "52c3117a3915465e9d32080aa81133d2": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_1dac7865fef94b17a4c7f7cdbc9e66a5", "max": 76940081, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_ab77cd6070c04165bb3ab5fec8462cea", "value": 76940081 } }, 
"7810dd9e5a7345ccbbce7f5f842257c5": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_8fa32d20f04b4028a50ca3756902057d", "placeholder": "​", "style": "IPY_MODEL_8eb538cf7e11449193f09b50c5972527", "value": " 76.9MB / 76.9MB            " } }, "75391687beba44fcb865e94826c18448": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "5befde6cd9be4979aae7a206566489f0": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", 
"_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "4f18c1dc8bbe44f88c5e45cf011d9b8a": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "1dac7865fef94b17a4c7f7cdbc9e66a5": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, 
"grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "ab77cd6070c04165bb3ab5fec8462cea": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "8fa32d20f04b4028a50ca3756902057d": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": 
null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "8eb538cf7e11449193f09b50c5972527": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "18d06944cab84229bfc895be3045fb68": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_336f4226b1404d9caf17957d0180ebe7", "IPY_MODEL_d2f78086a06f466cb9f898e369b31737", "IPY_MODEL_c8949f7b04404b26a2e2c07970103734" ], "layout": "IPY_MODEL_85157b23501c456d97281e8036ac017b" } }, "336f4226b1404d9caf17957d0180ebe7": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_546a1931aca64c0dbd346300bc13f663", "placeholder": "​", "style": "IPY_MODEL_d97f213da8be41458cb3c5cece728895", "value": "Resolving data files: 100%" } }, "d2f78086a06f466cb9f898e369b31737": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": 
"@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_bf7a710cf05649d8889a75c0787a8c54", "max": 66, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_307d0098eeac4b4a96f2bbeed4cae44a", "value": 66 } }, "c8949f7b04404b26a2e2c07970103734": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_28d12d4c5b684251a7012657e41ae3ed", "placeholder": "​", "style": "IPY_MODEL_8a715535edb3453eb1f7c89a541c2128", "value": " 66/66 [00:00<00:00, 5610.31it/s]" } }, "85157b23501c456d97281e8036ac017b": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, 
"max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "546a1931aca64c0dbd346300bc13f663": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "d97f213da8be41458cb3c5cece728895": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "bf7a710cf05649d8889a75c0787a8c54": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { 
"_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "307d0098eeac4b4a96f2bbeed4cae44a": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "28d12d4c5b684251a7012657e41ae3ed": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, 
"grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "8a715535edb3453eb1f7c89a541c2128": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "b7d99cf4da9f4b33ad45e4ead2f5fe94": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_862e2ebceb4d4319b4bb3a614c37fb3f", "IPY_MODEL_642c80df2cac4071973cfd3f2a9832aa", "IPY_MODEL_aff9d3e555d244aeb292accd33f2b1d8" ], "layout": "IPY_MODEL_2a7a6cc636f8464bb7f61d9654b91c7e" } }, "862e2ebceb4d4319b4bb3a614c37fb3f": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": 
"@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_fec4bf8d1247430baf5f9a14114d1111", "placeholder": "​", "style": "IPY_MODEL_150bd779791d40ef828053256959e497", "value": "Resolving data files: 100%" } }, "642c80df2cac4071973cfd3f2a9832aa": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_d136acf173144a3fb2cd0804e7f7d1fd", "max": 65, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_05eb480f62614bd6b4dfdf84d176fd0a", "value": 65 } }, "aff9d3e555d244aeb292accd33f2b1d8": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_4fa74733675d4af0b0b9b8f2c241e74c", "placeholder": "​", "style": "IPY_MODEL_39cd54b59c4d4ec8be684ea8cf454863", "value": " 65/65 [00:00<00:00, 7319.90it/s]" } }, "2a7a6cc636f8464bb7f61d9654b91c7e": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": 
"LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "fec4bf8d1247430baf5f9a14114d1111": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "150bd779791d40ef828053256959e497": { "model_module": "@jupyter-widgets/controls", 
"model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "d136acf173144a3fb2cd0804e7f7d1fd": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "05eb480f62614bd6b4dfdf84d176fd0a": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "4fa74733675d4af0b0b9b8f2c241e74c": { "model_module": 
"@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "39cd54b59c4d4ec8be684ea8cf454863": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "43b7ace8e5fe4afcaca3c8f16ea87d12": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ 
"IPY_MODEL_ea78900a078a453cb2a9958112d4fd3d", "IPY_MODEL_03f2d0c2f98b4e3f853da73866b1f545", "IPY_MODEL_1333733eb94040f9b89cfe432ac630ac" ], "layout": "IPY_MODEL_a0d3bf50dc2e40d4a2503ee33faef5d4" } }, "ea78900a078a453cb2a9958112d4fd3d": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_266e0cbd75064af48cecdc2ca8d8fda9", "placeholder": "​", "style": "IPY_MODEL_caedc4685ca1401fa3531422c9275f41", "value": "Loading dataset shards: 100%" } }, "03f2d0c2f98b4e3f853da73866b1f545": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_41e9f6bec0f04a4c86bcbf8d99a5cbec", "max": 33, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_f0f64a03c8394835a058bd5531dadbc3", "value": 33 } }, "1333733eb94040f9b89cfe432ac630ac": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": 
"IPY_MODEL_8ae8358c0e0f40deb6e35b04035c55e8", "placeholder": "​", "style": "IPY_MODEL_394a822c5b7c4944af8425c04159518e", "value": " 33/33 [00:00<00:00, 1586.48it/s]" } }, "a0d3bf50dc2e40d4a2503ee33faef5d4": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "266e0cbd75064af48cecdc2ca8d8fda9": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, 
"grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "caedc4685ca1401fa3531422c9275f41": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "41e9f6bec0f04a4c86bcbf8d99a5cbec": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, 
"visibility": null, "width": null } }, "f0f64a03c8394835a058bd5531dadbc3": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "8ae8358c0e0f40deb6e35b04035c55e8": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "394a822c5b7c4944af8425c04159518e": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", 
"_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "b5ea8fff71e3412eb8bac4b8b9361b57": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_73d5d37affca4f03b61255c91cf6b0e3", "IPY_MODEL_98d35b0afb09470f80d4ddb3e4d335e6", "IPY_MODEL_657e758a892148538b581eca528aa9e4" ], "layout": "IPY_MODEL_f8aa47961fa64655b1258697a896826b" } }, "73d5d37affca4f03b61255c91cf6b0e3": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_f36c100ec8be42acbda64e61e8ec843c", "placeholder": "​", "style": "IPY_MODEL_07b07954cbc84fcb95630911960665b0", "value": "Loading weights: 100%" } }, "98d35b0afb09470f80d4ddb3e4d335e6": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_19b37d811f804a888b9a421cbe814f45", "max": 199, "min": 0, "orientation": "horizontal", "style": 
"IPY_MODEL_610830e6733b46eda52e94143b85b9c0", "value": 199 } }, "657e758a892148538b581eca528aa9e4": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_1db0a35340144968b3d5cb6e15822b43", "placeholder": "​", "style": "IPY_MODEL_bd13afbac48a4ffb9d2d07bb160e694d", "value": " 199/199 [00:00<00:00, 993.22it/s, Materializing param=pooler.dense.weight]" } }, "f8aa47961fa64655b1258697a896826b": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "f36c100ec8be42acbda64e61e8ec843c": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", 
"model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "07b07954cbc84fcb95630911960665b0": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "19b37d811f804a888b9a421cbe814f45": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, 
"grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "610830e6733b46eda52e94143b85b9c0": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "1db0a35340144968b3d5cb6e15822b43": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": 
null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "bd13afbac48a4ffb9d2d07bb160e694d": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "5277dbd3e7e14f76baa5350537005fe3": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_43316455d4424cd7bfab5de4c02cb422", "IPY_MODEL_54d0eecd566547828f49b3c40e3a7f02", "IPY_MODEL_37b8e63ca39048a8942118bd5c6d61e3" ], "layout": "IPY_MODEL_4758fa24b11d4a7f84da125b1e2b9e10" } }, "43316455d4424cd7bfab5de4c02cb422": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_d6272da4a71d4f6ea65b5ecd641a38b0", "placeholder": "​", "style": "IPY_MODEL_df5db664e6ba4ab889fe471167b9347a", "value": "Processing Files (1 / 1)      : 100%" } }, "54d0eecd566547828f49b3c40e3a7f02": { "model_module": 
"@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_bbb614acc44649a18739cddb915c62a8", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_a60cdb4f9c16410e82f9d07274c0f7dd", "value": 1 } }, "37b8e63ca39048a8942118bd5c6d61e3": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_5d6d1c715a3a48eabbb1bc0f3341fdd7", "placeholder": "​", "style": "IPY_MODEL_3978a71c259943bcb0d0337c998d4448", "value": " 23.1MB / 23.1MB, 14.5MB/s  " } }, "4758fa24b11d4a7f84da125b1e2b9e10": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, 
"grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "d6272da4a71d4f6ea65b5ecd641a38b0": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "df5db664e6ba4ab889fe471167b9347a": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, 
"bbb614acc44649a18739cddb915c62a8": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "a60cdb4f9c16410e82f9d07274c0f7dd": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "5d6d1c715a3a48eabbb1bc0f3341fdd7": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, 
"align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "3978a71c259943bcb0d0337c998d4448": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "c53b2b33e0824b018f0ae22e082b46be": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_b4dcd8a699de4c3cbb3f5ea5211b4e8e", "IPY_MODEL_39a72dbf71404cfaafce9f77f465e487", "IPY_MODEL_d245921984574376973def38e0d53105" ], "layout": "IPY_MODEL_a926fcfdaa7e48e29399e57e2cb346e3" } }, "b4dcd8a699de4c3cbb3f5ea5211b4e8e": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], 
"_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_fa2142485a2044178afd06bdabc6e270", "placeholder": "​", "style": "IPY_MODEL_c75f0fd940c2440596d5267e9aace4fd", "value": "New Data Upload               : 100%" } }, "39a72dbf71404cfaafce9f77f465e487": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_569b55b72b90457b97b148c62a888c83", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_3283c7a2b8db4e8ab124ec31c1ce18fd", "value": 1 } }, "d245921984574376973def38e0d53105": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_bf64c4f10d524f918022ddddc2b7ef36", "placeholder": "​", "style": "IPY_MODEL_d23c40e7ef044857b38765b7fea13050", "value": " 23.1MB / 23.1MB, 14.5MB/s  " } }, "a926fcfdaa7e48e29399e57e2cb346e3": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": 
"1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "fa2142485a2044178afd06bdabc6e270": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, 
"right": null, "top": null, "visibility": null, "width": null } }, "c75f0fd940c2440596d5267e9aace4fd": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "569b55b72b90457b97b148c62a888c83": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "3283c7a2b8db4e8ab124ec31c1ce18fd": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", 
"_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "bf64c4f10d524f918022ddddc2b7ef36": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "d23c40e7ef044857b38765b7fea13050": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "15522ec035684d7eb76cb9841bf4683f": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, 
"_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_6814a254a9ff4784b88d0e3d6a08961a", "IPY_MODEL_97887435275d4bd9b1c61203de3ce5ea", "IPY_MODEL_ef096e7fc3c6447cb47f1db8d6317cd3" ], "layout": "IPY_MODEL_76c29f13f6e14fe694a08ac3045e2c31" } }, "6814a254a9ff4784b88d0e3d6a08961a": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_cbb8144324844453b7a636c950c6b5a0", "placeholder": "​", "style": "IPY_MODEL_a3324b569447418ea079048909f572aa", "value": "  ...to/bert_vae/best_model.pt: 100%" } }, "97887435275d4bd9b1c61203de3ce5ea": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_7af698b582d648478a179ddffba12d55", "max": 23136309, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_dadbc15c6e7a48c4b5991e2fd1408b21", "value": 23136309 } }, "ef096e7fc3c6447cb47f1db8d6317cd3": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": 
"@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_e3cd669ba96d42babf7b63f7dbcb8ca9", "placeholder": "​", "style": "IPY_MODEL_c438ea2e0a9f4dbdb0f055071129be89", "value": " 23.1MB / 23.1MB            " } }, "76c29f13f6e14fe694a08ac3045e2c31": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "cbb8144324844453b7a636c950c6b5a0": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, 
"grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "a3324b569447418ea079048909f572aa": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "7af698b582d648478a179ddffba12d55": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, 
"object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "dadbc15c6e7a48c4b5991e2fd1408b21": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "e3cd669ba96d42babf7b63f7dbcb8ca9": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "c438ea2e0a9f4dbdb0f055071129be89": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", 
"_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "812a049de25646d58b54122a96fee6d7": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_6e7c87852a584f7380bd2c3be536e39b", "IPY_MODEL_04314ae0e4184652a05b41c51506e1a7", "IPY_MODEL_5c719605acaa4d94a1fa4a6715db0fa6" ], "layout": "IPY_MODEL_79ac7b7df0f54575a4db87863ab19992" } }, "6e7c87852a584f7380bd2c3be536e39b": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_e92d0da9bda4420dae614da72a1a0f10", "placeholder": "​", "style": "IPY_MODEL_756f43561bb8483195e44be41c7b8dd2", "value": "Processing Files (1 / 1)      : 100%" } }, "04314ae0e4184652a05b41c51506e1a7": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": 
null, "layout": "IPY_MODEL_749ca8072a614f6b9f397441ffe85af4", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_e10ab97193dc4a568e083bdc94e8dc77", "value": 1 } }, "5c719605acaa4d94a1fa4a6715db0fa6": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_6605440d453d45248a2f5e1f17877373", "placeholder": "​", "style": "IPY_MODEL_7e5a842ff0fe433ea2b67482dd921120", "value": " 3.45MB / 3.45MB,  868kB/s  " } }, "79ac7b7df0f54575a4db87863ab19992": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "e92d0da9bda4420dae614da72a1a0f10": { 
"model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "756f43561bb8483195e44be41c7b8dd2": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "749ca8072a614f6b9f397441ffe85af4": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, 
"bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "e10ab97193dc4a568e083bdc94e8dc77": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "6605440d453d45248a2f5e1f17877373": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, 
"max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "7e5a842ff0fe433ea2b67482dd921120": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "4b5b597289054ee8b9b84cd056bb86a1": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_4df17aa15bd4472bbc33e54a37363e67", "IPY_MODEL_de2397d36ce642ed84fb15a57c633211", "IPY_MODEL_732ed80696ac42358fca176b123b8ea5" ], "layout": "IPY_MODEL_549ec071258a4c56956583f593ae5eb1" } }, "4df17aa15bd4472bbc33e54a37363e67": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_cb7421659ca84422bdd717e7baa0d49e", "placeholder": "​", "style": "IPY_MODEL_58bf0404288c47e191e7c93034635e57", "value": "New Data Upload               : 100%" } }, 
"de2397d36ce642ed84fb15a57c633211": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_659ccfdbd14544c2b1bfd14b8c158815", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_d5fe3b25a93045daaf2ee9cd278fcbef", "value": 1 } }, "732ed80696ac42358fca176b123b8ea5": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_56a8fa94d23d458ba079fd394cf89c21", "placeholder": "​", "style": "IPY_MODEL_6cae88e9c5074e9486e3e1e9fb35086a", "value": " 3.45MB / 3.45MB,  868kB/s  " } }, "549ec071258a4c56956583f593ae5eb1": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, 
"grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "cb7421659ca84422bdd717e7baa0d49e": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "58bf0404288c47e191e7c93034635e57": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": 
"StyleView", "description_width": "" } }, "659ccfdbd14544c2b1bfd14b8c158815": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "d5fe3b25a93045daaf2ee9cd278fcbef": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "56a8fa94d23d458ba079fd394cf89c21": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", 
"_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "6cae88e9c5074e9486e3e1e9fb35086a": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "54e5e472dd7b47dd88bffb26e19ec02b": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_88767a47d1904f77a769f35147ea0996", "IPY_MODEL_97245e35f05c4ac4ab66e251ba132442", "IPY_MODEL_ba8ed902ea314bd58ecba97b97a7ef81" ], "layout": "IPY_MODEL_a993dc68470341199ce556820db32d68" } }, "88767a47d1904f77a769f35147ea0996": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": 
"1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_24d0a2ea1c6746b2ab2647549418217e", "placeholder": "​", "style": "IPY_MODEL_90d55e00a32148f389ad17d6b08e27ea", "value": "  ...roto/bert_vae/val_data.pt: 100%" } }, "97245e35f05c4ac4ab66e251ba132442": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_d84d641b99004aa695ac6d6d8ee20fe0", "max": 3450989, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_e45bbe53c4c646759c59c2463b56b72f", "value": 3450989 } }, "ba8ed902ea314bd58ecba97b97a7ef81": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_add21858275b4086aaadf4e4e147f47a", "placeholder": "​", "style": "IPY_MODEL_cdf44dfc96bf4cec992264275abad639", "value": " 3.45MB / 3.45MB            " } }, "a993dc68470341199ce556820db32d68": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": 
"@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "24d0a2ea1c6746b2ab2647549418217e": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, 
"overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "90d55e00a32148f389ad17d6b08e27ea": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "d84d641b99004aa695ac6d6d8ee20fe0": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "e45bbe53c4c646759c59c2463b56b72f": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": 
null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "add21858275b4086aaadf4e4e147f47a": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "cdf44dfc96bf4cec992264275abad639": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "0f89caf18b7c4eb1a4f9defa2ffc48f8": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", 
"_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_c884fb3da9d04060a5eaef9bfbfdec27", "IPY_MODEL_593a501cca8e4b7993950d0a89e6279e", "IPY_MODEL_ff6e11d99d654f6fba14f349374b166b" ], "layout": "IPY_MODEL_ddc58fb24b6d4f7d96868cdea6460661" } }, "c884fb3da9d04060a5eaef9bfbfdec27": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_1a228dde99c1405088b21a6fc5144615", "placeholder": "​", "style": "IPY_MODEL_c43c5fcebd39475788f1774aecdbc2be", "value": "Processing Files (1 / 1)      : 100%" } }, "593a501cca8e4b7993950d0a89e6279e": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_435e4e00ac85424a9ca91248b7e7eeb0", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_d6c2ef1bb9024afd832e8f3738eebd9c", "value": 1 } }, "ff6e11d99d654f6fba14f349374b166b": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": 
null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_c90c91b5f35a4bbd8057602f601ad098", "placeholder": "​", "style": "IPY_MODEL_789b7025d9954c639db31a5d8b81062a", "value": " 76.9MB / 76.9MB, 25.6MB/s  " } }, "ddc58fb24b6d4f7d96868cdea6460661": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "1a228dde99c1405088b21a6fc5144615": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": 
null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "c43c5fcebd39475788f1774aecdbc2be": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "435e4e00ac85424a9ca91248b7e7eeb0": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, 
"object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "d6c2ef1bb9024afd832e8f3738eebd9c": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "c90c91b5f35a4bbd8057602f601ad098": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "789b7025d9954c639db31a5d8b81062a": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": 
"@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "07beebb910034f6ab04a3d4259842109": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_c3cbbd3337734566899ecf007f29a8ff", "IPY_MODEL_9bef238235a943f99da8ea8a77fefb0c", "IPY_MODEL_cc6d23814aa6443197ce798784517c47" ], "layout": "IPY_MODEL_018b62a225f34f35a76d62db686f0abc" } }, "c3cbbd3337734566899ecf007f29a8ff": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_081ddaf1f251443fa5e7588cb910e92b", "placeholder": "​", "style": "IPY_MODEL_adf828bcc3ae4bc9b96f5b930060c320", "value": "New Data Upload               : 100%" } }, "9bef238235a943f99da8ea8a77fefb0c": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": 
"", "description_tooltip": null, "layout": "IPY_MODEL_6d1ef626f1c64917acf0842c0fee6aa7", "max": 1, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_25a85b151d6e4c1b9d445ebb831b7112", "value": 1 } }, "cc6d23814aa6443197ce798784517c47": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_3e091734df3d4baebebb02f9cbb63ee6", "placeholder": "​", "style": "IPY_MODEL_8552d8d708184275a00b2559f2595e47", "value": " 76.9MB / 76.9MB, 25.6MB/s  " } }, "018b62a225f34f35a76d62db686f0abc": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, 
"081ddaf1f251443fa5e7588cb910e92b": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "adf828bcc3ae4bc9b96f5b930060c320": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "6d1ef626f1c64917acf0842c0fee6aa7": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": 
null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": "20px" } }, "25a85b151d6e4c1b9d445ebb831b7112": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "3e091734df3d4baebebb02f9cbb63ee6": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": 
null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "8552d8d708184275a00b2559f2595e47": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "83b732e4b23b4447af917863f6ffb623": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_3e4b5dc01997430fb5337d6110576498", "IPY_MODEL_2fb9cf319cec431d92af2774c4c9c81d", "IPY_MODEL_cba129f49ef345fbba7b015700f82f1a" ], "layout": "IPY_MODEL_60e89e3f654c4b64b8f1d3881ee2029f" } }, "3e4b5dc01997430fb5337d6110576498": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_4090ceafa8734d1fa3781d002938f6ba", "placeholder": "​", "style": "IPY_MODEL_a4c78830a97243008a6b8e7478d1ddef", "value": "  
...ert_geometric_features.pt: 100%" } }, "2fb9cf319cec431d92af2774c4c9c81d": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_cb6d5f43a0b34efcb7be97b6cf1e6a1f", "max": 76940145, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_8f029e8087804edf9e973504ca4845fb", "value": 76940145 } }, "cba129f49ef345fbba7b015700f82f1a": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_f07e8b54053f400fac173775938c1c48", "placeholder": "​", "style": "IPY_MODEL_8c55edfcd5e24faf81a8a6a08d0ff8e3", "value": " 76.9MB / 76.9MB            " } }, "60e89e3f654c4b64b8f1d3881ee2029f": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": 
null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "4090ceafa8734d1fa3781d002938f6ba": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "a4c78830a97243008a6b8e7478d1ddef": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", 
"_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "cb6d5f43a0b34efcb7be97b6cf1e6a1f": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "8f029e8087804edf9e973504ca4845fb": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "f07e8b54053f400fac173775938c1c48": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", 
"_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "8c55edfcd5e24faf81a8a6a08d0ff8e3": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "8109359c35fa411b8af2da9ab5de23d9": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_2b2eae2c4e0d4f50bd7fe41c87783623", "IPY_MODEL_f32081acffb34428af5804fad25f324a", "IPY_MODEL_7a0183d109d349c9b1478c5ecab34ab5" ], "layout": "IPY_MODEL_acf388cc4b484a5fb3c1992e2cf83671" } }, "2b2eae2c4e0d4f50bd7fe41c87783623": { "model_module": "@jupyter-widgets/controls", "model_name": 
"HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_693a0407b946478cbc1c01e35812836b", "placeholder": "​", "style": "IPY_MODEL_1368e6e49f0b40da8debb6dc833b68fa", "value": "Resolving data files: 100%" } }, "f32081acffb34428af5804fad25f324a": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_6b96660a08ee4593b3bbedf15bc343be", "max": 66, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_a677eb094f7844508fc40330f49e4d81", "value": 66 } }, "7a0183d109d349c9b1478c5ecab34ab5": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_46c274f611614099839ac4739c8b4526", "placeholder": "​", "style": "IPY_MODEL_1833901ffacc4b0db48ac24156666e22", "value": " 66/66 [00:00<00:00, 5232.47it/s]" } }, "acf388cc4b484a5fb3c1992e2cf83671": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { 
"_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "693a0407b946478cbc1c01e35812836b": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": 
null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "1368e6e49f0b40da8debb6dc833b68fa": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "6b96660a08ee4593b3bbedf15bc343be": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "a677eb094f7844508fc40330f49e4d81": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", 
"_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "46c274f611614099839ac4739c8b4526": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "1833901ffacc4b0db48ac24156666e22": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "cb1703b325ef4758b894bc1ee7ea686e": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": 
"1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_942b9bde8a22414f8fb07202baaf77e6", "IPY_MODEL_57219c9c8dd04d42939eee9bf04672ba", "IPY_MODEL_abf227988c764efca54ac60f3bf6c2e1" ], "layout": "IPY_MODEL_cb7127277ccc48b99b00dd142a5cc337" } }, "942b9bde8a22414f8fb07202baaf77e6": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_c0d39303e1f7487ea28817e7ac79d74e", "placeholder": "​", "style": "IPY_MODEL_868024569e624fa3985089b615bf63f0", "value": "Resolving data files: 100%" } }, "57219c9c8dd04d42939eee9bf04672ba": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_cc3ca9e781994afdbc211a7090ea8c78", "max": 65, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_0216cc2b90074fb3b50b5d6801cc9a6a", "value": 65 } }, "abf227988c764efca54ac60f3bf6c2e1": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": 
null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_289fb4145aaa4efe89d2485e30d4aa9e", "placeholder": "​", "style": "IPY_MODEL_f38d40c8bcfa4620a1fdde384aba7769", "value": " 65/65 [00:00<00:00, 6245.96it/s]" } }, "cb7127277ccc48b99b00dd142a5cc337": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "c0d39303e1f7487ea28817e7ac79d74e": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": 
null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "868024569e624fa3985089b615bf63f0": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "cc3ca9e781994afdbc211a7090ea8c78": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, 
"object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "0216cc2b90074fb3b50b5d6801cc9a6a": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "289fb4145aaa4efe89d2485e30d4aa9e": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "f38d40c8bcfa4620a1fdde384aba7769": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": 
"@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "b26211cdfab94de0b62ab1df0e5db0b9": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HBoxModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HBoxView", "box_style": "", "children": [ "IPY_MODEL_d39165b2ae9c4e388f27caa2b2707888", "IPY_MODEL_90542f59a0274d83bb32e2ddd6efd774", "IPY_MODEL_019e21884d9b46a2ba71c4326f397345" ], "layout": "IPY_MODEL_bf904531bf5b4465a466c415709d561c" } }, "d39165b2ae9c4e388f27caa2b2707888": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_1f2cdd860249472a822050ab53fb8804", "placeholder": "​", "style": "IPY_MODEL_ac1011791cac4be7a563ecfc4614b3eb", "value": "Loading dataset shards: 100%" } }, "90542f59a0274d83bb32e2ddd6efd774": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "FloatProgressModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "ProgressView", "bar_style": "success", "description": "", 
"description_tooltip": null, "layout": "IPY_MODEL_4d892ae8ef7a4d00a21c2e60948850f8", "max": 33, "min": 0, "orientation": "horizontal", "style": "IPY_MODEL_d2a3155f414144588edd9ca78604dfed", "value": 33 } }, "019e21884d9b46a2ba71c4326f397345": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "model_module_version": "1.5.0", "state": { "_dom_classes": [], "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "HTMLModel", "_view_count": null, "_view_module": "@jupyter-widgets/controls", "_view_module_version": "1.5.0", "_view_name": "HTMLView", "description": "", "description_tooltip": null, "layout": "IPY_MODEL_0ba551aeb35049d49b5cfde76b0aa5d0", "placeholder": "​", "style": "IPY_MODEL_51cb99c3c2f84530964e4c870f3c47e4", "value": " 33/33 [00:00<00:00, 1538.30it/s]" } }, "bf904531bf5b4465a466c415709d561c": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, 
"1f2cdd860249472a822050ab53fb8804": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "ac1011791cac4be7a563ecfc4614b3eb": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } }, "4d892ae8ef7a4d00a21c2e60948850f8": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": 
null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "d2a3155f414144588edd9ca78604dfed": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "ProgressStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "bar_color": null, "description_width": "" } }, "0ba551aeb35049d49b5cfde76b0aa5d0": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "model_module_version": "1.2.0", "state": { "_model_module": "@jupyter-widgets/base", "_model_module_version": "1.2.0", "_model_name": "LayoutModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "LayoutView", "align_content": null, "align_items": null, "align_self": null, "border": null, "bottom": null, "display": null, "flex": null, "flex_flow": null, "grid_area": null, "grid_auto_columns": null, "grid_auto_flow": null, "grid_auto_rows": null, "grid_column": null, "grid_gap": null, "grid_row": null, "grid_template_areas": null, "grid_template_columns": null, "grid_template_rows": null, "height": null, "justify_content": null, "justify_items": 
null, "left": null, "margin": null, "max_height": null, "max_width": null, "min_height": null, "min_width": null, "object_fit": null, "object_position": null, "order": null, "overflow": null, "overflow_x": null, "overflow_y": null, "padding": null, "right": null, "top": null, "visibility": null, "width": null } }, "51cb99c3c2f84530964e4c870f3c47e4": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "model_module_version": "1.5.0", "state": { "_model_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", "_model_name": "DescriptionStyleModel", "_view_count": null, "_view_module": "@jupyter-widgets/base", "_view_module_version": "1.2.0", "_view_name": "StyleView", "description_width": "" } } } } }, "cells": [ { "cell_type": "markdown", "source": [ "# geometric analyzer" ], "metadata": { "id": "MP8TLCP7QWcl" } }, { "cell_type": "code", "source": [ "\"\"\"\n", "Hierarchical Shape Generator - Two-Tier Gate Version\n", "======================================================\n", "Generates grids only. 
import numpy as np
from typing import Dict, Optional
from itertools import combinations

# === Grid Constants ===========================================================
# Global voxel grid: (z, y, x) = (8, 16, 16), split into 2x4x4 patches,
# giving a 4x4x4 = 64-patch macro grid.
GZ, GY, GX = 8, 16, 16
GRID_SHAPE = (GZ, GY, GX)
GRID_VOLUME = GZ * GY * GX

PATCH_Z, PATCH_Y, PATCH_X = 2, 4, 4
PATCH_VOL = PATCH_Z * PATCH_Y * PATCH_X
MACRO_Z, MACRO_Y, MACRO_X = GZ // PATCH_Z, GY // PATCH_Y, GX // PATCH_X
MACRO_N = MACRO_Z * MACRO_Y * MACRO_X

# Worker budget (A100 Colab limit)
MAX_WORKERS = 10

# (3 -> GRID_VOLUME x 3) table of every voxel coordinate, float64.
_COORDS = np.mgrid[0:GZ, 0:GY, 0:GX].reshape(3, -1).T.astype(np.float64)

# === Classes ==================================================================
CLASS_NAMES = [
    "point", "line", "corner", "cross", "arc", "helix", "circle",
    "triangle", "quad", "plane", "disc",
    "tetrahedron", "cube", "pyramid", "prism", "octahedron", "pentachoron", "wedge",
    "sphere", "hemisphere", "torus", "bowl", "saddle", "capsule", "cylinder", "cone", "channel"
]
NUM_CLASSES = len(CLASS_NAMES)
CLASS_TO_IDX = {n: i for i, n in enumerate(CLASS_NAMES)}

# === Two-Tier Gate Constants ==================================================

# Local gates: intrinsic to each patch, no cross-patch info needed
# dims: 4 classes (0D point, 1D line, 2D surface, 3D volume)
# curvature: 3 classes (rigid, curved, combined)
# boundary: 1 binary (partial fill = surface patch)
# axis_active: 3 binary (which axes have extent > 1 voxel)
NUM_LOCAL_DIMS = 4
NUM_LOCAL_CURVS = 3
NUM_LOCAL_BOUNDARY = 1
NUM_LOCAL_AXES = 3
LOCAL_GATE_DIM = NUM_LOCAL_DIMS + NUM_LOCAL_CURVS + NUM_LOCAL_BOUNDARY + NUM_LOCAL_AXES  # 11

# Structural gates: relational, require neighborhood context (post-attention)
# topology: 2 classes (open / closed based on neighbor count)
# neighbor_ct: 1 continuous (normalized 0-1, raw count / 6)
# surface_role: 3 classes (isolated 0-1 neighbors, boundary 2-4, interior 5-6)
NUM_STRUCT_TOPO = 2
NUM_STRUCT_NEIGHBOR = 1
NUM_STRUCT_ROLE = 3
STRUCTURAL_GATE_DIM = NUM_STRUCT_TOPO + NUM_STRUCT_NEIGHBOR + NUM_STRUCT_ROLE  # 6

TOTAL_GATE_DIM = LOCAL_GATE_DIM + STRUCTURAL_GATE_DIM  # 17

# Legacy compat
GATES = ["rigid", "curved", "combined", "open", "closed"]
NUM_GATES = len(GATES)


# === Rasterization ============================================================
def rasterize_line(p1, p2):
    """Rasterize the segment p1->p2 (each a (z, y, x) triple) onto the grid.

    Samples max(|delta|)+1 points (at least 2), rounds to integer voxels and
    clips to the grid bounds. Returns an (n, 3) int array; consecutive rows
    may repeat when the segment is short.
    """
    p1, p2 = np.array(p1, dtype=float), np.array(p2, dtype=float)
    n = max(int(np.max(np.abs(p2 - p1))) + 1, 2)
    t = np.linspace(0, 1, n)[:, None]
    pts = np.round(p1 + t * (p2 - p1)).astype(int)
    return np.clip(pts, [0, 0, 0], [GZ-1, GY-1, GX-1])


def rasterize_edges(verts, edges):
    """Rasterize a wireframe: one segment per (i, j) vertex-index pair.

    Returns an (n, 3) int array of voxels. Fix: an empty edge list now
    returns an empty (0, 3) array instead of raising from np.concatenate([]).
    """
    pts = []
    for i, j in edges:
        pts.append(rasterize_line(verts[i], verts[j]))
    if not pts:
        return np.zeros((0, 3), dtype=int)
    return np.concatenate(pts)


def rasterize_faces(verts, faces, density=1.0):
    """Rasterize filled triangles by barycentric sampling.

    Each face uses its first three vertex indices; sample count scales with
    the longer edge times `density`. Returns an (n, 3) int array clipped to
    the grid. Fix: an empty face list now returns an empty (0, 3) array —
    previously np.array([]) had shape (0,) and np.clip against 3-vectors
    raised a broadcast error.
    """
    pts = []
    for f in faces:
        v0, v1, v2 = [np.array(verts[i], dtype=float) for i in f[:3]]
        e1, e2 = v1 - v0, v2 - v0
        n = max(int(max(np.linalg.norm(e1), np.linalg.norm(e2)) * density) + 1, 3)
        for u in np.linspace(0, 1, n):
            # Second barycentric coordinate is bounded by 1-u; shrink the
            # inner sample count accordingly (at least one sample).
            for v in np.linspace(0, 1 - u, max(int(n * (1 - u)), 1)):
                p = np.round(v0 + u * e1 + v * e2).astype(int)
                pts.append(p)
    if not pts:
        return np.zeros((0, 3), dtype=int)
    return np.clip(np.array(pts), [0, 0, 0], [GZ-1, GY-1, GX-1])
" pts = []\n", " zr = range(max(0, int(c[0] - r)), min(GZ, int(c[0] + r) + 1))\n", " for z in zr:\n", " for y in range(max(0, int(c[1] - r)), min(GY, int(c[1] + r) + 1)):\n", " for x in range(max(0, int(c[2] - r)), min(GX, int(c[2] + r) + 1)):\n", " d = np.sqrt((z - c[0])**2 + (y - c[1])**2 + (x - c[2])**2)\n", " if fill and d <= r:\n", " if zmin is not None and z < zmin: continue\n", " if zmax is not None and z > zmax: continue\n", " pts.append([z, y, x])\n", " elif not fill and abs(d - r) < 0.8:\n", " if half and z < c[0]: continue\n", " pts.append([z, y, x])\n", " return np.array(pts) if pts else np.zeros((0, 3), dtype=int)\n", "\n", "\n", "# === Shape Generators =========================================================\n", "class HierarchicalShapeGenerator:\n", " def __init__(self, seed=42):\n", " self.rng = np.random.RandomState(seed)\n", "\n", " def _random_center(self, margin=3):\n", " return [self.rng.randint(max(1, margin//2), max(2, GZ - margin//2)),\n", " self.rng.randint(margin, GY - margin),\n", " self.rng.randint(margin, GX - margin)]\n", "\n", " def _to_grid(self, pts):\n", " if len(pts) == 0: return None, None\n", " grid = np.zeros(GRID_SHAPE, dtype=np.float32)\n", " pts = np.clip(np.array(pts).astype(int), [0, 0, 0], [GZ-1, GY-1, GX-1])\n", " grid[pts[:, 0], pts[:, 1], pts[:, 2]] = 1.0\n", " return grid, pts\n", "\n", " def generate(self, name):\n", " r = self.rng\n", " c = self._random_center()\n", "\n", " try:\n", " if name == \"point\":\n", " pts = [c]\n", " elif name == \"line\":\n", " axis = r.randint(0, 3)\n", " p1, p2 = list(c), list(c)\n", " L = r.randint(4, [GZ, GY, GX][axis])\n", " p1[axis] = max(0, c[axis] - L//2)\n", " p2[axis] = min([GZ, GY, GX][axis] - 1, c[axis] + L//2)\n", " pts = rasterize_line(p1, p2)\n", " elif name == \"corner\":\n", " L = r.randint(3, 7)\n", " p1, p2 = list(c), list(c)\n", " p1[1] = max(0, c[1] - L)\n", " p2[2] = min(GX - 1, c[2] + L)\n", " pts = np.concatenate([rasterize_line(c, p1), rasterize_line(c, 
p2)])\n", " elif name == \"cross\":\n", " L = r.randint(2, 5)\n", " pts = []\n", " for d in range(3):\n", " p1, p2 = list(c), list(c)\n", " p1[d] = max(0, c[d] - L)\n", " p2[d] = min([GZ, GY, GX][d] - 1, c[d] + L)\n", " pts.append(rasterize_line(p1, p2))\n", " pts = np.concatenate(pts)\n", " elif name == \"arc\":\n", " R = r.uniform(2, 5)\n", " t = np.linspace(0, np.pi * r.uniform(0.4, 0.9), 30)\n", " pts = np.round(np.column_stack([c[0] + np.zeros_like(t), c[1] + R*np.cos(t), c[2] + R*np.sin(t)])).astype(int)\n", " elif name == \"helix\":\n", " R, H = r.uniform(2, 4), r.uniform(3, GZ - 2)\n", " t = np.linspace(0, 4*np.pi, 60)\n", " pts = np.round(np.column_stack([c[0] - H/2 + t/(4*np.pi)*H, c[1] + R*np.cos(t), c[2] + R*np.sin(t)])).astype(int)\n", " elif name == \"circle\":\n", " R = r.uniform(2, 5)\n", " t = np.linspace(0, 2*np.pi, 40)\n", " pts = np.round(np.column_stack([np.full_like(t, c[0]), c[1] + R*np.cos(t), c[2] + R*np.sin(t)])).astype(int)\n", " elif name == \"triangle\":\n", " s = r.uniform(3, 6)\n", " v = [[c[0], c[1] - s, c[2]], [c[0], c[1] + s//2, c[2] - s], [c[0], c[1] + s//2, c[2] + s]]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,0)])\n", " elif name == \"quad\":\n", " s = r.randint(2, 5)\n", " v = [[c[0], c[1]-s, c[2]-s], [c[0], c[1]-s, c[2]+s], [c[0], c[1]+s, c[2]+s], [c[0], c[1]+s, c[2]-s]]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,3),(3,0)])\n", " elif name == \"plane\":\n", " s = r.randint(2, 5)\n", " pts = rasterize_faces([[c[0],c[1]-s,c[2]-s],[c[0],c[1]-s,c[2]+s],[c[0],c[1]+s,c[2]+s],[c[0],c[1]+s,c[2]-s]], [(0,1,2),(0,2,3)])\n", " elif name == \"disc\":\n", " R = r.uniform(2, 5)\n", " pts = rasterize_sphere(c, R, fill=True)\n", " pts = pts[pts[:, 0] == c[0]] if len(pts) > 0 else pts\n", " elif name == \"tetrahedron\":\n", " s = r.uniform(3, 5)\n", " v = [[c[0]+s,c[1],c[2]], [c[0]-s//2,c[1]+s,c[2]], [c[0]-s//2,c[1]-s//2,c[2]+s], [c[0]-s//2,c[1]-s//2,c[2]-s]]\n", " pts = rasterize_edges(v, [(0,1),(0,2),(0,3),(1,2),(1,3),(2,3)])\n", " 
elif name == \"cube\":\n", " s = r.randint(2, 4)\n", " v = [[c[0]+d[0]*s, c[1]+d[1]*s, c[2]+d[2]*s] for d in [(-1,-1,-1),(-1,-1,1),(-1,1,1),(-1,1,-1),(1,-1,-1),(1,-1,1),(1,1,1),(1,1,-1)]]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(6,7),(7,4),(0,4),(1,5),(2,6),(3,7)])\n", " elif name == \"pyramid\":\n", " s = r.randint(2, 4)\n", " base = [[c[0]-s,c[1]-s,c[2]-s],[c[0]-s,c[1]-s,c[2]+s],[c[0]-s,c[1]+s,c[2]+s],[c[0]-s,c[1]+s,c[2]-s]]\n", " apex = [c[0]+s, c[1], c[2]]\n", " v = base + [apex]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,3),(3,0),(0,4),(1,4),(2,4),(3,4)])\n", " elif name == \"prism\":\n", " s, h = r.randint(2, 4), r.randint(2, 4)\n", " bottom = [[c[0]-h,c[1]-s,c[2]], [c[0]-h,c[1]+s//2,c[2]-s], [c[0]-h,c[1]+s//2,c[2]+s]]\n", " top = [[b[0]+2*h, b[1], b[2]] for b in bottom]\n", " v = bottom + top\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,0),(3,4),(4,5),(5,3),(0,3),(1,4),(2,5)])\n", " elif name == \"octahedron\":\n", " s = r.uniform(2, 4)\n", " v = [[c[0]+s,c[1],c[2]],[c[0]-s,c[1],c[2]],[c[0],c[1]+s,c[2]],[c[0],c[1]-s,c[2]],[c[0],c[1],c[2]+s],[c[0],c[1],c[2]-s]]\n", " pts = rasterize_edges(v, [(0,2),(0,3),(0,4),(0,5),(1,2),(1,3),(1,4),(1,5),(2,4),(2,5),(3,4),(3,5)])\n", " elif name == \"pentachoron\":\n", " s = r.uniform(2, 4)\n", " v = [[c[0]+s,c[1],c[2]],[c[0]-s//2,c[1]+s,c[2]],[c[0]-s//2,c[1]-s//2,c[2]+s],[c[0]-s//2,c[1]-s//2,c[2]-s],[c[0],c[1],c[2]]]\n", " pts = rasterize_edges(v, [(i,j) for i in range(5) for j in range(i+1,5)])\n", " elif name == \"wedge\":\n", " s = r.randint(2, 4)\n", " v = [[c[0]-s,c[1]-s,c[2]-s],[c[0]-s,c[1]+s,c[2]-s],[c[0]-s,c[1],c[2]+s],[c[0]+s,c[1]-s,c[2]-s],[c[0]+s,c[1]+s,c[2]-s],[c[0]+s,c[1],c[2]+s]]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,0),(3,4),(4,5),(5,3),(0,3),(1,4),(2,5)])\n", " elif name == \"sphere\":\n", " R = r.uniform(2, min(3.5, GZ//2 - 1))\n", " pts = rasterize_sphere(c, R, fill=False)\n", " elif name == \"hemisphere\":\n", " R = r.uniform(2, min(3.5, GZ//2 - 1))\n", " pts = 
rasterize_sphere(c, R, fill=False, half=True)\n", " elif name == \"torus\":\n", " R, rr = r.uniform(3, 5), r.uniform(1, 2)\n", " t = np.linspace(0, 2*np.pi, 40)\n", " p = np.linspace(0, 2*np.pi, 20)\n", " T, P = np.meshgrid(t, p)\n", " pts = np.round(np.column_stack([c[0] + rr*np.sin(P.ravel()), c[1] + (R+rr*np.cos(P.ravel()))*np.cos(T.ravel()), c[2] + (R+rr*np.cos(P.ravel()))*np.sin(T.ravel())])).astype(int)\n", " elif name == \"bowl\":\n", " R = r.uniform(2, 4)\n", " pts = rasterize_sphere(c, R, fill=False)\n", " pts = pts[pts[:, 0] >= c[0]] if len(pts) > 0 else pts\n", " elif name == \"saddle\":\n", " s = r.uniform(2, 4)\n", " Y, X = np.mgrid[-s:s:0.5, -s:s:0.5]\n", " Z = (Y**2 - X**2) / (2*s)\n", " pts = np.round(np.column_stack([c[0] + Z.ravel(), c[1] + Y.ravel(), c[2] + X.ravel()])).astype(int)\n", " elif name == \"capsule\":\n", " R, H = r.uniform(1.5, 3), r.uniform(2, 4)\n", " shell = rasterize_sphere(c, R, fill=False)\n", " body = []\n", " for z in range(max(0, int(c[0]-H//2)), min(GZ, int(c[0]+H//2)+1)):\n", " for y in range(GY):\n", " for x in range(GX):\n", " if abs(np.sqrt((y-c[1])**2 + (x-c[2])**2) - R) < 0.8:\n", " body.append([z, y, x])\n", " pts = np.concatenate([shell, np.array(body) if body else np.zeros((0,3), dtype=int)])\n", " elif name == \"cylinder\":\n", " R, H = r.uniform(2, 4), r.uniform(3, GZ - 2)\n", " pts = []\n", " for z in range(max(0, int(c[0]-H/2)), min(GZ, int(c[0]+H/2)+1)):\n", " for y in range(GY):\n", " for x in range(GX):\n", " d = np.sqrt((y-c[1])**2 + (x-c[2])**2)\n", " if abs(d - R) < 0.8:\n", " pts.append([z, y, x])\n", " pts = np.array(pts) if pts else np.zeros((0,3), dtype=int)\n", " elif name == \"cone\":\n", " R, H = r.uniform(2, 4), r.uniform(3, GZ - 2)\n", " pts = []\n", " for z in range(max(0, int(c[0]-H/2)), min(GZ, int(c[0]+H/2)+1)):\n", " frac = 1 - (z - (c[0]-H/2)) / H\n", " cr = R * frac\n", " for y in range(GY):\n", " for x in range(GX):\n", " d = np.sqrt((y-c[1])**2 + (x-c[2])**2)\n", " if abs(d - cr) < 0.8 
and cr > 0.3:\n", " pts.append([z, y, x])\n", " pts = np.array(pts) if pts else np.zeros((0,3), dtype=int)\n", " elif name == \"channel\":\n", " R = r.uniform(2, 4)\n", " L = r.randint(6, GX - 2)\n", " pts = []\n", " for z in range(GZ):\n", " for x in range(max(0, c[2]-L//2), min(GX, c[2]+L//2)):\n", " for y in range(GY):\n", " d = np.sqrt((z - c[0])**2 + (y - c[1])**2)\n", " if abs(d - R) < 0.8:\n", " pts.append([z, y, x])\n", " pts = np.array(pts) if pts else np.zeros((0,3), dtype=int)\n", " else:\n", " return None\n", " except Exception:\n", " return None\n", "\n", " grid, pts = self._to_grid(pts)\n", " if grid is not None and pts is not None and len(pts) > 0:\n", " return {\"grid\": grid, \"class_idx\": CLASS_TO_IDX[name]}\n", " return None\n", "\n", " def generate_multi(self, n_shapes: int = None) -> Optional[Dict]:\n", " if n_shapes is None:\n", " n_shapes = self.rng.randint(2, 5)\n", " names = list(self.rng.choice(CLASS_NAMES, size=n_shapes, replace=False))\n", " shapes = [s for s in [self.generate(n) for n in names] if s is not None]\n", " if len(shapes) < 2:\n", " return None\n", " grid = np.zeros(GRID_SHAPE, dtype=np.float32)\n", " membership = np.zeros((MACRO_N, NUM_CLASSES), dtype=np.float32)\n", " for s in shapes:\n", " pts = np.argwhere(s[\"grid\"] > 0.5)\n", " grid[pts[:, 0], pts[:, 1], pts[:, 2]] = 1.0\n", " patch_idx = (pts[:, 0]//PATCH_Z) * (MACRO_Y*MACRO_X) + (pts[:, 1]//PATCH_Y) * MACRO_X + (pts[:, 2]//PATCH_X)\n", " np.add.at(membership[:, s[\"class_idx\"]], patch_idx, 1.0)\n", " return {\"grid\": grid, \"membership\": (membership > 0).astype(np.float32), \"n_shapes\": len(shapes)}\n", "\n", "\n", "def _worker(args):\n", " seed, min_s, max_s = args\n", " gen = HierarchicalShapeGenerator(seed)\n", " return gen.generate_multi(gen.rng.randint(min_s, max_s + 1))\n", "\n", "\n", "def generate_dataset(n_samples: int, seed: int = 42, num_workers: int = MAX_WORKERS) -> Dict:\n", " from multiprocessing import Pool\n", " try:\n", " from tqdm import 
tqdm\n", " use_tqdm = True\n", " except ImportError:\n", " use_tqdm = False\n", "\n", " tasks = [(seed * 10000 + i, 2, 4) for i in range(n_samples * 2)]\n", " grids, memberships, n_shapes = [], [], []\n", "\n", " with Pool(num_workers) as pool:\n", " pbar = tqdm(total=n_samples, desc=\"Generating\") if use_tqdm else None\n", " for r in pool.imap_unordered(_worker, tasks):\n", " if r is not None and len(grids) < n_samples:\n", " grids.append(r[\"grid\"])\n", " memberships.append(r[\"membership\"])\n", " n_shapes.append(r[\"n_shapes\"])\n", " if pbar: pbar.update(1)\n", " if len(grids) >= n_samples:\n", " break\n", " if pbar: pbar.close()\n", "\n", " return {\"grids\": np.array(grids), \"memberships\": np.array(memberships), \"n_shapes\": np.array(n_shapes)}\n", "\n", "\n", "# === Patch Analysis: Two-Tier =================================================\n", "\n", "def analyze_local_patches(grids):\n", " \"\"\"\n", " Local patch properties — intrinsic to each patch's voxels.\n", " No cross-patch information. 
def analyze_local_patches(grids):
    """
    Local patch properties — intrinsic to each patch's voxels.
    No cross-patch information. Computable from raw patch data.

    Returns:
        occupancy: (N, 64) float — mean voxel density
        dims: (N, 64) long — 0-3 (axis extent counting); -1 for empty patches
        curvature: (N, 64) long — 0=rigid, 1=curved, 2=combined
        boundary: (N, 64) float — 1.0 if partial fill (surface patch)
        axis_active: (N, 64, 3) float — which axes have extent > 1
        fill_ratio: (N, 64) float — voxels / bounding_box_volume
    """
    import torch

    if isinstance(grids, np.ndarray):
        grids = torch.from_numpy(grids).float()

    device, N = grids.device, grids.shape[0]
    # Reshape the (8, 16, 16) grid into 64 patches of (2, 4, 4) voxels.
    patches = grids.view(N, MACRO_Z, PATCH_Z, MACRO_Y, PATCH_Y, MACRO_X, PATCH_X)
    patches = patches.permute(0, 1, 3, 5, 2, 4, 6).contiguous().view(N, MACRO_N, PATCH_Z, PATCH_Y, PATCH_X)

    occupancy = patches.sum(dim=(2, 3, 4)) / PATCH_VOL
    occ_mask = occupancy > 0.01
    occ = patches > 0.5

    # Per-axis coordinate grids, broadcastable over (N, 64, pz, py, px).
    z_c = torch.arange(PATCH_Z, device=device).view(1, 1, PATCH_Z, 1, 1).float()
    y_c = torch.arange(PATCH_Y, device=device).view(1, 1, 1, PATCH_Y, 1).float()
    x_c = torch.arange(PATCH_X, device=device).view(1, 1, 1, 1, PATCH_X).float()
    INF = 1000.0

    # Extent per axis = max occupied coord - min occupied coord; sentinels
    # +-INF make empty patches come out hugely negative (clamped below).
    z_ext = torch.where(occ, z_c.expand_as(patches), torch.full_like(patches, -INF)).amax(dim=(2,3,4)) - torch.where(occ, z_c.expand_as(patches), torch.full_like(patches, INF)).amin(dim=(2,3,4))
    y_ext = torch.where(occ, y_c.expand_as(patches), torch.full_like(patches, -INF)).amax(dim=(2,3,4)) - torch.where(occ, y_c.expand_as(patches), torch.full_like(patches, INF)).amin(dim=(2,3,4))
    x_ext = torch.where(occ, x_c.expand_as(patches), torch.full_like(patches, -INF)).amax(dim=(2,3,4)) - torch.where(occ, x_c.expand_as(patches), torch.full_like(patches, INF)).amin(dim=(2,3,4))

    # Count how many axes have extent >= 1: sorted descending so the k-th
    # entry being >= 1 means at least k+1 active axes.
    ext_sorted, _ = torch.stack([z_ext, y_ext, x_ext], dim=-1).clamp(min=0).sort(dim=-1, descending=True)
    dims = torch.zeros(N, MACRO_N, dtype=torch.long, device=device)
    dims = torch.where(ext_sorted[..., 0] >= 1, torch.tensor(1, device=device), dims)
    dims = torch.where(ext_sorted[..., 1] >= 1, torch.tensor(2, device=device), dims)
    dims = torch.where(ext_sorted[..., 2] >= 1, torch.tensor(3, device=device), dims)
    dims = torch.where(~occ_mask, torch.tensor(-1, device=device), dims)

    # Heuristic curvature proxy: dense bounding boxes read as rigid/flat,
    # sparse ones as curved, in-between as combined.
    voxels = patches.sum(dim=(2, 3, 4))
    bb_vol = ((z_ext + 1) * (y_ext + 1) * (x_ext + 1)).clamp(min=1)
    fill_ratio = voxels / bb_vol
    curvature = torch.where(fill_ratio > 0.6, 0, torch.where(fill_ratio < 0.3, 1, 2)).long()

    boundary = ((occupancy > 0.01) & (occupancy < 0.9)).float()

    axis_active = torch.stack([
        (z_ext.clamp(min=0) >= 1).float(),
        (y_ext.clamp(min=0) >= 1).float(),
        (x_ext.clamp(min=0) >= 1).float(),
    ], dim=-1)

    return {
        "occupancy": occupancy,
        "dims": dims,
        "curvature": curvature,
        "boundary": boundary,
        "axis_active": axis_active,
        "fill_ratio": fill_ratio,
    }


def analyze_structural_patches(grids, local_data):
    """
    Structural patch properties — relational, require neighborhood context.
    Ground truth targets for post-attention heads.

    Returns:
        topology: (N, 64) long — 0=open (<= 3 neighbors), 1=closed (> 3)
        neighbor_count: (N, 64) float — normalized 0-1 (raw count / 6)
        surface_role: (N, 64) long — 0=isolated (0-1), 1=boundary (2-4), 2=interior (5-6)
    """
    import torch
    import torch.nn.functional as F

    if isinstance(grids, np.ndarray):
        grids = torch.from_numpy(grids).float()

    device, N = grids.device, grids.shape[0]
    occ_mask = local_data["occupancy"] > 0.01

    # 6-connectivity kernel over the 4x4x4 macro grid (faces only, center 0,
    # so a patch does not count itself).
    occ_3d = occ_mask.float().view(N, 1, MACRO_Z, MACRO_Y, MACRO_X)
    kernel = torch.zeros(1, 1, 3, 3, 3, device=device)
    kernel[0, 0, 1, 1, 0] = kernel[0, 0, 1, 1, 2] = 1
    kernel[0, 0, 1, 0, 1] = kernel[0, 0, 1, 2, 1] = 1
    kernel[0, 0, 0, 1, 1] = kernel[0, 0, 2, 1, 1] = 1
    raw_count = F.conv3d(occ_3d, kernel, padding=1).view(N, MACRO_N)

    topology = (raw_count > 3).long()
    neighbor_count = raw_count / 6.0

    # Role buckets: later where() overwrites earlier, so 5-6 wins over 2-4.
    surface_role = torch.zeros(N, MACRO_N, dtype=torch.long, device=device)
    surface_role = torch.where(raw_count >= 2, torch.tensor(1, device=device), surface_role)
    surface_role = torch.where(raw_count >= 5, torch.tensor(2, device=device), surface_role)

    return {
        "topology": topology,
        "neighbor_count": neighbor_count,
        "surface_role": surface_role,
    }


def analyze_patches_torch(grids):
    """Combined analysis — returns both local and structural properties."""
    local_data = analyze_local_patches(grids)
    struct_data = analyze_structural_patches(grids, local_data)

    import torch
    N = local_data["occupancy"].shape[0]
    device = local_data["occupancy"].device
    # Legacy 5-way gate labels: one-hot curvature (3) + one-hot topology (2).
    labels = torch.zeros(N, MACRO_N, NUM_GATES, device=device)
    labels[..., 0] = (local_data["curvature"] == 0).float()
    labels[..., 1] = (local_data["curvature"] == 1).float()
    labels[..., 2] = (local_data["curvature"] == 2).float()
    labels[..., 3] = (struct_data["topology"] == 0).float()
    labels[..., 4] = (struct_data["topology"] == 1).float()

    return {
        # Local
        "patch_occupancy": local_data["occupancy"],
        "patch_dims": local_data["dims"],
        "patch_curvature": local_data["curvature"],
        "patch_boundary": local_data["boundary"],
        "patch_axis_active": local_data["axis_active"],
        "patch_fill_ratio": local_data["fill_ratio"],
        # Structural
        "patch_topology": struct_data["topology"],
        "patch_neighbor_count": struct_data["neighbor_count"],
        "patch_surface_role": struct_data["surface_role"],
        # Legacy
        "patch_labels": labels,
    }
"\n", "\n", "# === Dataset ==================================================================\n", "import torch\n", "from torch.utils.data import Dataset\n", "\n", "\n", "class ShapeDataset(Dataset):\n", " def __init__(self, grids, memberships, patch_data):\n", " self.grids = grids\n", " self.memberships = memberships\n", "\n", " # Local\n", " self.patch_occupancy = patch_data[\"patch_occupancy\"]\n", " self.patch_dims = patch_data[\"patch_dims\"]\n", " self.patch_curvature = patch_data[\"patch_curvature\"]\n", " self.patch_boundary = patch_data[\"patch_boundary\"]\n", " self.patch_axis_active = patch_data[\"patch_axis_active\"]\n", " self.patch_fill_ratio = patch_data[\"patch_fill_ratio\"]\n", "\n", " # Structural\n", " self.patch_topology = patch_data[\"patch_topology\"]\n", " self.patch_neighbor_count = patch_data[\"patch_neighbor_count\"]\n", " self.patch_surface_role = patch_data[\"patch_surface_role\"]\n", "\n", " # Legacy\n", " self.patch_labels = patch_data[\"patch_labels\"]\n", "\n", " # Derived global targets\n", " self.patch_shape_count = (memberships > 0).sum(dim=-1).long()\n", " self.global_shapes = (memberships.sum(dim=1) > 0).float()\n", " occ_mask = self.patch_occupancy > 0.01\n", " occ_count = occ_mask.sum(dim=1, keepdim=True).clamp(min=1)\n", " self.global_gates = (self.patch_labels * occ_mask.unsqueeze(-1)).sum(dim=1) / occ_count\n", "\n", " def __len__(self):\n", " return len(self.grids)\n", "\n", " def __getitem__(self, idx):\n", " return {\n", " \"grid\": self.grids[idx],\n", " \"patch_shape_membership\": self.memberships[idx],\n", " \"patch_shape_count\": self.patch_shape_count[idx],\n", " # Local\n", " \"patch_occupancy\": self.patch_occupancy[idx],\n", " \"patch_dims\": self.patch_dims[idx],\n", " \"patch_curvature\": self.patch_curvature[idx],\n", " \"patch_boundary\": self.patch_boundary[idx],\n", " \"patch_axis_active\": self.patch_axis_active[idx],\n", " \"patch_fill_ratio\": self.patch_fill_ratio[idx],\n", " # Structural\n", " 
\"patch_topology\": self.patch_topology[idx],\n", " \"patch_neighbor_count\": self.patch_neighbor_count[idx],\n", " \"patch_surface_role\": self.patch_surface_role[idx],\n", " # Legacy\n", " \"patch_labels\": self.patch_labels[idx],\n", " # Global\n", " \"global_shapes\": self.global_shapes[idx],\n", " \"global_gates\": self.global_gates[idx],\n", " }\n", "\n", "\n", "def collate_fn(batch):\n", " return {k: torch.stack([b[k] for b in batch]) for k in batch[0].keys()}\n", "\n", "\n", "print(f\"✓ Generator ready | Local: {LOCAL_GATE_DIM}d | Structural: {STRUCTURAL_GATE_DIM}d | Total: {TOTAL_GATE_DIM}d\")\n", "\n", "\"\"\"\n", "Superposition Patch Classifier - Two-Tier Gated Transformer\n", "=============================================================\n", "Colab Cell 2 of 3 - depends on Cell 1 (generator.py) namespace.\n", "\n", "Architecture:\n", " voxels → patch_embed → e₀\n", "\n", " Stage 0 (local gates): From raw embeddings, no attention\n", " e₀ → local_dim_head → dim_soft ─┐\n", " e₀ → local_curv_head → curv_soft ─┤ LOCAL_GATE_DIM = 11\n", " e₀ → local_bound_head → bound_soft ─┤\n", " e₀ → local_axis_head → axis_soft ─┘→ local_gates (detached)\n", "\n", " Stage 1 (bootstrap): Attention sees local gates\n", " proj([e₀, local_gates]) → bootstrap_block × N → h\n", "\n", " Stage 1.5 (structural gates): From h, after cross-patch context\n", " h → struct_topo_head → topo_soft ─┐\n", " h → struct_neighbor_head → neighbor_soft ─┤ STRUCTURAL_GATE_DIM = 6\n", " h → struct_role_head → role_soft ─┘→ structural_gates (detached)\n", "\n", " Stage 2 (geometric routing): Both gate tiers\n", " (h, local_gates, structural_gates) → geometric_block × N → h'\n", "\n", " Stage 3 (classification): Gated shape heads\n", " [h', local_gates, structural_gates] → shape_heads\n", "\"\"\"\n", "\n", "import math\n", "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", "\n", "# Cell 1 provides: all constants including LOCAL_GATE_DIM, STRUCTURAL_GATE_DIM, 
TOTAL_GATE_DIM\n", "\n", "\n", "# === Patch Embedding ==========================================================\n", "\n", "class PatchEmbedding3D(nn.Module):\n", " def __init__(self, patch_dim=64):\n", " super().__init__()\n", " self.proj = nn.Linear(PATCH_VOL, patch_dim)\n", " pz = torch.arange(MACRO_Z).float() / MACRO_Z\n", " py = torch.arange(MACRO_Y).float() / MACRO_Y\n", " px = torch.arange(MACRO_X).float() / MACRO_X\n", " pos = torch.stack(torch.meshgrid(pz, py, px, indexing='ij'), dim=-1).reshape(MACRO_N, 3)\n", " self.register_buffer('pos_embed', pos)\n", " self.pos_proj = nn.Linear(3, patch_dim)\n", "\n", " def forward(self, x):\n", " B = x.shape[0]\n", " patches = x.view(B, MACRO_Z, PATCH_Z, MACRO_Y, PATCH_Y, MACRO_X, PATCH_X)\n", " patches = patches.permute(0, 1, 3, 5, 2, 4, 6).contiguous().view(B, MACRO_N, PATCH_VOL)\n", " return self.proj(patches) + self.pos_proj(self.pos_embed)\n", "\n", "\n", "# === Standard Transformer Block ===============================================\n", "\n", "class TransformerBlock(nn.Module):\n", " def __init__(self, dim, n_heads, dropout=0.1):\n", " super().__init__()\n", " self.attn = nn.MultiheadAttention(dim, n_heads, dropout=dropout, batch_first=True)\n", " self.ff = nn.Sequential(\n", " nn.Linear(dim, dim * 4), nn.GELU(), nn.Dropout(dropout),\n", " nn.Linear(dim * 4, dim), nn.Dropout(dropout)\n", " )\n", " self.ln1, self.ln2 = nn.LayerNorm(dim), nn.LayerNorm(dim)\n", "\n", " def forward(self, x):\n", " x = x + self.attn(self.ln1(x), self.ln1(x), self.ln1(x))[0]\n", " return x + self.ff(self.ln2(x))\n", "\n", "\n", "# === Geometric Gated Attention ================================================\n", "\n", "class GatedGeometricAttention(nn.Module):\n", " \"\"\"\n", " Multi-head attention with two-tier gate modulation.\n", " Q, K see both local and structural gates.\n", " V modulated by combined gate vector.\n", " Per-head compatibility bias from gate interactions.\n", " \"\"\"\n", "\n", " def __init__(self, embed_dim, 
gate_dim, n_heads, dropout=0.1):\n", " super().__init__()\n", " self.embed_dim = embed_dim\n", " self.n_heads = n_heads\n", " self.head_dim = embed_dim // n_heads\n", "\n", " # Q, K from [h, all_gates]\n", " self.q_proj = nn.Linear(embed_dim + gate_dim, embed_dim)\n", " self.k_proj = nn.Linear(embed_dim + gate_dim, embed_dim)\n", " self.v_proj = nn.Linear(embed_dim, embed_dim)\n", "\n", " # Per-head gate compatibility\n", " self.gate_q = nn.Linear(gate_dim, n_heads)\n", " self.gate_k = nn.Linear(gate_dim, n_heads)\n", "\n", " # Value modulation by gates\n", " self.v_gate = nn.Sequential(nn.Linear(gate_dim, embed_dim), nn.Sigmoid())\n", "\n", " self.out_proj = nn.Linear(embed_dim, embed_dim)\n", " self.attn_drop = nn.Dropout(dropout)\n", " self.scale = math.sqrt(self.head_dim)\n", "\n", " def forward(self, h, gate_features):\n", " B, N, _ = h.shape\n", " hg = torch.cat([h, gate_features], dim=-1)\n", " Q = self.q_proj(hg).view(B, N, self.n_heads, self.head_dim).transpose(1, 2)\n", " K = self.k_proj(hg).view(B, N, self.n_heads, self.head_dim).transpose(1, 2)\n", "\n", " V = self.v_proj(h)\n", " V = (V * self.v_gate(gate_features)).view(B, N, self.n_heads, self.head_dim).transpose(1, 2)\n", "\n", " content_scores = (Q @ K.transpose(-2, -1)) / self.scale\n", " gq = self.gate_q(gate_features)\n", " gk = self.gate_k(gate_features)\n", " compat = torch.einsum('bih,bjh->bhij', gq, gk)\n", "\n", " attn = F.softmax(content_scores + compat, dim=-1)\n", " attn = self.attn_drop(attn)\n", "\n", " out = (attn @ V).transpose(1, 2).reshape(B, N, self.embed_dim)\n", " return self.out_proj(out)\n", "\n", "\n", "class GeometricTransformerBlock(nn.Module):\n", " def __init__(self, embed_dim, gate_dim, n_heads, dropout=0.1, ff_mult=4):\n", " super().__init__()\n", " self.ln1 = nn.LayerNorm(embed_dim)\n", " self.attn = GatedGeometricAttention(embed_dim, gate_dim, n_heads, dropout)\n", " self.ln2 = nn.LayerNorm(embed_dim)\n", " self.ff = nn.Sequential(\n", " nn.Linear(embed_dim, embed_dim 
class SuperpositionPatchClassifier(nn.Module):
    """
    Two-tier gated transformer for multi-shape superposition.

    Tier 1 (local): gate heads read raw patch embeddings — what IS in a patch.
    Tier 2 (structural): gate heads read post-attention h — what ROLE a patch plays.

    Both tiers are concatenated into a 17-d gate vector that conditions the
    geometric attention blocks and the classification heads.

    NOTE(review): the cell header describes both gate tiers as "(detached)",
    but no .detach() appears in forward() — gradients from later stages do
    flow back into the gate heads. Confirm which behavior is intended.
    """

    def __init__(self, embed_dim=128, patch_dim=64, n_bootstrap=2, n_geometric=2,
                 n_heads=4, dropout=0.1):
        super().__init__()
        self.embed_dim = embed_dim

        # Patch embedding
        self.patch_embed = PatchEmbedding3D(patch_dim)

        # === Stage 0: Local encoder + gate heads (pre-attention) ===
        # Shared MLP gives local heads enough capacity to extract
        # dims/curvature/boundary from 32 voxels without cross-patch info.
        local_hidden = patch_dim * 2  # 128
        self.local_encoder = nn.Sequential(
            nn.Linear(patch_dim, local_hidden), nn.GELU(), nn.Dropout(dropout),
            nn.Linear(local_hidden, local_hidden), nn.GELU(), nn.Dropout(dropout),
        )
        self.local_dim_head = nn.Linear(local_hidden, NUM_LOCAL_DIMS)
        self.local_curv_head = nn.Linear(local_hidden, NUM_LOCAL_CURVS)
        self.local_bound_head = nn.Linear(local_hidden, NUM_LOCAL_BOUNDARY)
        self.local_axis_head = nn.Linear(local_hidden, NUM_LOCAL_AXES)

        # Project [embedding, local_gates] → embed_dim for bootstrap
        self.proj = nn.Linear(patch_dim + LOCAL_GATE_DIM, embed_dim)

        # === Stage 1: Bootstrap blocks (attention with local gate context) ===
        self.bootstrap_blocks = nn.ModuleList([
            TransformerBlock(embed_dim, n_heads, dropout)
            for _ in range(n_bootstrap)
        ])

        # === Stage 1.5: Structural gate heads (from h, post-attention) ===
        self.struct_topo_head = nn.Linear(embed_dim, NUM_STRUCT_TOPO)
        self.struct_neighbor_head = nn.Linear(embed_dim, NUM_STRUCT_NEIGHBOR)
        self.struct_role_head = nn.Linear(embed_dim, NUM_STRUCT_ROLE)

        # === Stage 2: Geometric gated blocks (see both gate tiers) ===
        self.geometric_blocks = nn.ModuleList([
            GeometricTransformerBlock(embed_dim, TOTAL_GATE_DIM, n_heads, dropout)
            for _ in range(n_geometric)
        ])

        # === Stage 3: Gated classification ===
        gated_dim = embed_dim + TOTAL_GATE_DIM

        self.patch_shape_head = nn.Sequential(
            nn.Linear(gated_dim, embed_dim), nn.GELU(), nn.Dropout(dropout),
            nn.Linear(embed_dim, NUM_CLASSES)
        )

        self.global_pool = nn.Sequential(
            nn.Linear(gated_dim, embed_dim), nn.GELU(),
            nn.Linear(embed_dim, embed_dim)
        )
        self.global_gate_head = nn.Linear(embed_dim, NUM_GATES)
        self.global_shape_head = nn.Linear(embed_dim, NUM_CLASSES)

    def forward(self, x):
        """
        Args:
            x: voxel grid batch; reshaped by PatchEmbedding3D into 64 patches.

        Returns:
            Dict of logits for every gate head plus per-patch and global
            shape predictions (see keys below).
        """
        # === Raw patch embedding ===
        e = self.patch_embed(x)  # (B, 64, patch_dim)

        # === Stage 0: Local gates from raw embedding via local encoder ===
        e_local = self.local_encoder(e)  # (B, 64, local_hidden)
        local_dim_logits = self.local_dim_head(e_local)
        local_curv_logits = self.local_curv_head(e_local)
        local_bound_logits = self.local_bound_head(e_local)
        local_axis_logits = self.local_axis_head(e_local)

        # Softmax for exclusive categories, sigmoid for independent binaries.
        local_gates = torch.cat([
            F.softmax(local_dim_logits, dim=-1),
            F.softmax(local_curv_logits, dim=-1),
            torch.sigmoid(local_bound_logits),
            torch.sigmoid(local_axis_logits),
        ], dim=-1)  # (B, 64, 11)

        # === Stage 1: Bootstrap with local gate context ===
        h = self.proj(torch.cat([e, local_gates], dim=-1))
        for blk in self.bootstrap_blocks:
            h = blk(h)

        # === Stage 1.5: Structural gates from h (after cross-patch context) ===
        struct_topo_logits = self.struct_topo_head(h)
        struct_neighbor_logits = self.struct_neighbor_head(h)
        struct_role_logits = self.struct_role_head(h)

        structural_gates = torch.cat([
            F.softmax(struct_topo_logits, dim=-1),
            torch.sigmoid(struct_neighbor_logits),
            F.softmax(struct_role_logits, dim=-1),
        ], dim=-1)  # (B, 64, 6)

        # === Combined gate vector ===
        all_gates = torch.cat([local_gates, structural_gates], dim=-1)  # (B, 64, 17)

        # === Stage 2: Geometric gated transformer ===
        for blk in self.geometric_blocks:
            h = blk(h, all_gates)

        # === Stage 3: Classification from gated representations ===
        h_gated = torch.cat([h, all_gates], dim=-1)
        shape_logits = self.patch_shape_head(h_gated)
        # Mean-pool the gated patch features for the global heads.
        g = self.global_pool(h_gated.mean(dim=1))

        return {
            # Local gate predictions (Stage 0)
            "local_dim_logits": local_dim_logits,
            "local_curv_logits": local_curv_logits,
            "local_bound_logits": local_bound_logits,
            "local_axis_logits": local_axis_logits,

            # Structural gate predictions (Stage 1.5)
            "struct_topo_logits": struct_topo_logits,
            "struct_neighbor_logits": struct_neighbor_logits,
            "struct_role_logits": struct_role_logits,

            # Shape predictions (Stage 3)
            "patch_shape_logits": shape_logits,
            "patch_features": h,
            "global_features": g,
            "global_gates": self.global_gate_head(g),
            "global_shapes": self.global_shape_head(g),
        }
global_weight=0.5):\n", " super().__init__()\n", " self.lw, self.sw, self.shw, self.gw = local_weight, struct_weight, shape_weight, global_weight\n", "\n", " def forward(self, outputs, targets):\n", " occ_mask = targets[\"patch_occupancy\"] > 0.01\n", " n_occ = occ_mask.sum().clamp(min=1)\n", "\n", " # --- Local gate losses ---\n", " dim_loss = F.cross_entropy(\n", " outputs[\"local_dim_logits\"].view(-1, NUM_LOCAL_DIMS),\n", " targets[\"patch_dims\"].clamp(0, NUM_LOCAL_DIMS - 1).view(-1),\n", " reduction='none').view_as(occ_mask)\n", " curv_loss = F.cross_entropy(\n", " outputs[\"local_curv_logits\"].view(-1, NUM_LOCAL_CURVS),\n", " targets[\"patch_curvature\"].clamp(0, NUM_LOCAL_CURVS - 1).view(-1),\n", " reduction='none').view_as(occ_mask)\n", " bound_loss = F.binary_cross_entropy_with_logits(\n", " outputs[\"local_bound_logits\"].squeeze(-1),\n", " targets[\"patch_boundary\"],\n", " reduction='none')\n", " axis_loss = F.binary_cross_entropy_with_logits(\n", " outputs[\"local_axis_logits\"],\n", " targets[\"patch_axis_active\"],\n", " reduction='none').mean(dim=-1)\n", "\n", " local_loss = ((dim_loss + curv_loss + bound_loss + axis_loss) * occ_mask.float()).sum() / n_occ\n", "\n", " # --- Structural gate losses ---\n", " topo_loss = F.cross_entropy(\n", " outputs[\"struct_topo_logits\"].view(-1, NUM_STRUCT_TOPO),\n", " targets[\"patch_topology\"].clamp(0, NUM_STRUCT_TOPO - 1).view(-1),\n", " reduction='none').view_as(occ_mask)\n", " neighbor_loss = F.mse_loss(\n", " torch.sigmoid(outputs[\"struct_neighbor_logits\"].squeeze(-1)),\n", " targets[\"patch_neighbor_count\"],\n", " reduction='none')\n", " role_loss = F.cross_entropy(\n", " outputs[\"struct_role_logits\"].view(-1, NUM_STRUCT_ROLE),\n", " targets[\"patch_surface_role\"].clamp(0, NUM_STRUCT_ROLE - 1).view(-1),\n", " reduction='none').view_as(occ_mask)\n", "\n", " struct_loss = ((topo_loss + neighbor_loss + role_loss) * occ_mask.float()).sum() / n_occ\n", "\n", " # --- Shape losses ---\n", " shape_loss = 
F.binary_cross_entropy_with_logits(\n", " outputs[\"patch_shape_logits\"],\n", " targets[\"patch_shape_membership\"],\n", " reduction='none').mean(dim=-1)\n", " shape_loss = (shape_loss * occ_mask.float()).sum() / n_occ\n", "\n", " # --- Global losses ---\n", " global_gate_loss = F.binary_cross_entropy_with_logits(outputs[\"global_gates\"], targets[\"global_gates\"])\n", " global_shape_loss = F.binary_cross_entropy_with_logits(outputs[\"global_shapes\"], targets[\"global_shapes\"])\n", " global_loss = global_gate_loss + global_shape_loss\n", "\n", " total = self.lw * local_loss + self.sw * struct_loss + self.shw * shape_loss + self.gw * global_loss\n", "\n", " return {\n", " \"total\": total,\n", " \"local\": local_loss,\n", " \"struct\": struct_loss,\n", " \"shape\": shape_loss,\n", " \"global\": global_loss,\n", " }\n", "\n", "\n", "print(\"✓ Model ready (Two-Tier Gated Transformer)\")" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "PG9aQ7QoQYox", "outputId": "bc002937-2d03-4608-accf-03c9cf052cc4" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "✓ Generator ready | Local: 11d | Structural: 6d | Total: 17d\n", "✓ Model ready (Two-Tier Gated Transformer)\n" ] } ] }, { "cell_type": "markdown", "source": [ "# t5 small" ], "metadata": { "id": "yRdDaLDiQC89" } }, { "cell_type": "markdown", "source": [ "## model" ], "metadata": { "id": "0V0bKylWImYK" } }, { "cell_type": "code", "source": [ "\"\"\"\n", "Hierarchical Shape Generator - Two-Tier Gate Version\n", "======================================================\n", "Generates grids only. 
# === Grid Constants ===========================================================
GZ, GY, GX = 8, 16, 16
GRID_SHAPE = (GZ, GY, GX)
GRID_VOLUME = GZ * GY * GX

PATCH_Z, PATCH_Y, PATCH_X = 2, 4, 4
PATCH_VOL = PATCH_Z * PATCH_Y * PATCH_X
MACRO_Z, MACRO_Y, MACRO_X = GZ // PATCH_Z, GY // PATCH_Y, GX // PATCH_X
MACRO_N = MACRO_Z * MACRO_Y * MACRO_X

# Worker budget (A100 Colab limit)
MAX_WORKERS = 10

# Flattened (z, y, x) coordinates of every voxel, one row per voxel.
_COORDS = np.mgrid[0:GZ, 0:GY, 0:GX].reshape(3, -1).T.astype(np.float64)

# === Classes ==================================================================
CLASS_NAMES = [
    "point", "line", "corner", "cross", "arc", "helix", "circle",
    "triangle", "quad", "plane", "disc",
    "tetrahedron", "cube", "pyramid", "prism", "octahedron", "pentachoron", "wedge",
    "sphere", "hemisphere", "torus", "bowl", "saddle", "capsule", "cylinder", "cone", "channel"
]
NUM_CLASSES = len(CLASS_NAMES)
CLASS_TO_IDX = {n: i for i, n in enumerate(CLASS_NAMES)}

# === Two-Tier Gate Constants ==================================================

# Local gates: intrinsic to each patch, no cross-patch info needed
# dims: 4 classes (0D point, 1D line, 2D surface, 3D volume)
# curvature: 3 classes (rigid, curved, combined)
# boundary: 1 binary (partial fill = surface patch)
# axis_active: 3 binary (which axes have extent > 1 voxel)
NUM_LOCAL_DIMS = 4
NUM_LOCAL_CURVS = 3
NUM_LOCAL_BOUNDARY = 1
NUM_LOCAL_AXES = 3
LOCAL_GATE_DIM = NUM_LOCAL_DIMS + NUM_LOCAL_CURVS + NUM_LOCAL_BOUNDARY + NUM_LOCAL_AXES  # 11

# Structural gates: relational, require neighborhood context (post-attention)
# topology: 2 classes (open / closed based on neighbor count)
# neighbor_ct: 1 continuous (normalized 0-1, raw count / 6)
# surface_role: 3 classes (isolated 0-1 neighbors, boundary 2-4, interior 5-6)
NUM_STRUCT_TOPO = 2
NUM_STRUCT_NEIGHBOR = 1
NUM_STRUCT_ROLE = 3
STRUCTURAL_GATE_DIM = NUM_STRUCT_TOPO + NUM_STRUCT_NEIGHBOR + NUM_STRUCT_ROLE  # 6

TOTAL_GATE_DIM = LOCAL_GATE_DIM + STRUCTURAL_GATE_DIM  # 17

# Legacy compat
GATES = ["rigid", "curved", "combined", "open", "closed"]
NUM_GATES = len(GATES)


# === Rasterization ============================================================
def rasterize_line(p1, p2):
    """
    Rasterize the segment p1→p2 into integer (z, y, x) voxel coordinates.

    Samples max-axis-extent + 1 points (at least 2, so both endpoints are
    always included) and clips to the grid bounds.

    Returns: (n, 3) int array.
    """
    p1, p2 = np.array(p1, dtype=float), np.array(p2, dtype=float)
    n = max(int(np.max(np.abs(p2 - p1))) + 1, 2)
    t = np.linspace(0, 1, n)[:, None]
    pts = np.round(p1 + t * (p2 - p1)).astype(int)
    return np.clip(pts, [0, 0, 0], [GZ-1, GY-1, GX-1])


def rasterize_edges(verts, edges):
    """
    Rasterize a wireframe: one line per (i, j) vertex-index pair.

    Returns: (n, 3) int array; (0, 3) for an empty edge list
    (np.concatenate would otherwise raise on zero segments).
    """
    if len(edges) == 0:
        return np.zeros((0, 3), dtype=int)
    segments = [rasterize_line(verts[i], verts[j]) for i, j in edges]
    return np.concatenate(segments)


def rasterize_faces(verts, faces, density=1.0):
    """
    Rasterize filled triangles via barycentric-style (u, v) sampling.

    Each face uses its first three vertex indices; sampling resolution
    scales with the longer of the two edge lengths times `density`.

    Returns: (n, 3) int array clipped to the grid; (0, 3) if no faces.
    """
    pts = []
    for f in faces:
        v0, v1, v2 = [np.array(verts[i], dtype=float) for i in f[:3]]
        e1, e2 = v1 - v0, v2 - v0
        n = max(int(max(np.linalg.norm(e1), np.linalg.norm(e2)) * density) + 1, 3)
        for u in np.linspace(0, 1, n):
            # Shrink the v-range as u grows so samples stay inside the triangle.
            for v in np.linspace(0, 1 - u, max(int(n * (1 - u)), 1)):
                p = np.round(v0 + u * e1 + v * e2).astype(int)
                pts.append(p)
    if not pts:
        # Guard: np.clip on an empty (0,)-shaped array would broadcast-fail.
        return np.zeros((0, 3), dtype=int)
    return np.clip(np.array(pts), [0, 0, 0], [GZ-1, GY-1, GX-1])
" pts = []\n", " zr = range(max(0, int(c[0] - r)), min(GZ, int(c[0] + r) + 1))\n", " for z in zr:\n", " for y in range(max(0, int(c[1] - r)), min(GY, int(c[1] + r) + 1)):\n", " for x in range(max(0, int(c[2] - r)), min(GX, int(c[2] + r) + 1)):\n", " d = np.sqrt((z - c[0])**2 + (y - c[1])**2 + (x - c[2])**2)\n", " if fill and d <= r:\n", " if zmin is not None and z < zmin: continue\n", " if zmax is not None and z > zmax: continue\n", " pts.append([z, y, x])\n", " elif not fill and abs(d - r) < 0.8:\n", " if half and z < c[0]: continue\n", " pts.append([z, y, x])\n", " return np.array(pts) if pts else np.zeros((0, 3), dtype=int)\n", "\n", "\n", "# === Shape Generators =========================================================\n", "class HierarchicalShapeGenerator:\n", " def __init__(self, seed=42):\n", " self.rng = np.random.RandomState(seed)\n", "\n", " def _random_center(self, margin=3):\n", " return [self.rng.randint(max(1, margin//2), max(2, GZ - margin//2)),\n", " self.rng.randint(margin, GY - margin),\n", " self.rng.randint(margin, GX - margin)]\n", "\n", " def _to_grid(self, pts):\n", " if len(pts) == 0: return None, None\n", " grid = np.zeros(GRID_SHAPE, dtype=np.float32)\n", " pts = np.clip(np.array(pts).astype(int), [0, 0, 0], [GZ-1, GY-1, GX-1])\n", " grid[pts[:, 0], pts[:, 1], pts[:, 2]] = 1.0\n", " return grid, pts\n", "\n", " def generate(self, name):\n", " r = self.rng\n", " c = self._random_center()\n", "\n", " try:\n", " if name == \"point\":\n", " pts = [c]\n", " elif name == \"line\":\n", " axis = r.randint(0, 3)\n", " p1, p2 = list(c), list(c)\n", " L = r.randint(4, [GZ, GY, GX][axis])\n", " p1[axis] = max(0, c[axis] - L//2)\n", " p2[axis] = min([GZ, GY, GX][axis] - 1, c[axis] + L//2)\n", " pts = rasterize_line(p1, p2)\n", " elif name == \"corner\":\n", " L = r.randint(3, 7)\n", " p1, p2 = list(c), list(c)\n", " p1[1] = max(0, c[1] - L)\n", " p2[2] = min(GX - 1, c[2] + L)\n", " pts = np.concatenate([rasterize_line(c, p1), rasterize_line(c, 
p2)])\n", " elif name == \"cross\":\n", " L = r.randint(2, 5)\n", " pts = []\n", " for d in range(3):\n", " p1, p2 = list(c), list(c)\n", " p1[d] = max(0, c[d] - L)\n", " p2[d] = min([GZ, GY, GX][d] - 1, c[d] + L)\n", " pts.append(rasterize_line(p1, p2))\n", " pts = np.concatenate(pts)\n", " elif name == \"arc\":\n", " R = r.uniform(2, 5)\n", " t = np.linspace(0, np.pi * r.uniform(0.4, 0.9), 30)\n", " pts = np.round(np.column_stack([c[0] + np.zeros_like(t), c[1] + R*np.cos(t), c[2] + R*np.sin(t)])).astype(int)\n", " elif name == \"helix\":\n", " R, H = r.uniform(2, 4), r.uniform(3, GZ - 2)\n", " t = np.linspace(0, 4*np.pi, 60)\n", " pts = np.round(np.column_stack([c[0] - H/2 + t/(4*np.pi)*H, c[1] + R*np.cos(t), c[2] + R*np.sin(t)])).astype(int)\n", " elif name == \"circle\":\n", " R = r.uniform(2, 5)\n", " t = np.linspace(0, 2*np.pi, 40)\n", " pts = np.round(np.column_stack([np.full_like(t, c[0]), c[1] + R*np.cos(t), c[2] + R*np.sin(t)])).astype(int)\n", " elif name == \"triangle\":\n", " s = r.uniform(3, 6)\n", " v = [[c[0], c[1] - s, c[2]], [c[0], c[1] + s//2, c[2] - s], [c[0], c[1] + s//2, c[2] + s]]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,0)])\n", " elif name == \"quad\":\n", " s = r.randint(2, 5)\n", " v = [[c[0], c[1]-s, c[2]-s], [c[0], c[1]-s, c[2]+s], [c[0], c[1]+s, c[2]+s], [c[0], c[1]+s, c[2]-s]]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,3),(3,0)])\n", " elif name == \"plane\":\n", " s = r.randint(2, 5)\n", " pts = rasterize_faces([[c[0],c[1]-s,c[2]-s],[c[0],c[1]-s,c[2]+s],[c[0],c[1]+s,c[2]+s],[c[0],c[1]+s,c[2]-s]], [(0,1,2),(0,2,3)])\n", " elif name == \"disc\":\n", " R = r.uniform(2, 5)\n", " pts = rasterize_sphere(c, R, fill=True)\n", " pts = pts[pts[:, 0] == c[0]] if len(pts) > 0 else pts\n", " elif name == \"tetrahedron\":\n", " s = r.uniform(3, 5)\n", " v = [[c[0]+s,c[1],c[2]], [c[0]-s//2,c[1]+s,c[2]], [c[0]-s//2,c[1]-s//2,c[2]+s], [c[0]-s//2,c[1]-s//2,c[2]-s]]\n", " pts = rasterize_edges(v, [(0,1),(0,2),(0,3),(1,2),(1,3),(2,3)])\n", " 
elif name == \"cube\":\n", " s = r.randint(2, 4)\n", " v = [[c[0]+d[0]*s, c[1]+d[1]*s, c[2]+d[2]*s] for d in [(-1,-1,-1),(-1,-1,1),(-1,1,1),(-1,1,-1),(1,-1,-1),(1,-1,1),(1,1,1),(1,1,-1)]]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,3),(3,0),(4,5),(5,6),(6,7),(7,4),(0,4),(1,5),(2,6),(3,7)])\n", " elif name == \"pyramid\":\n", " s = r.randint(2, 4)\n", " base = [[c[0]-s,c[1]-s,c[2]-s],[c[0]-s,c[1]-s,c[2]+s],[c[0]-s,c[1]+s,c[2]+s],[c[0]-s,c[1]+s,c[2]-s]]\n", " apex = [c[0]+s, c[1], c[2]]\n", " v = base + [apex]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,3),(3,0),(0,4),(1,4),(2,4),(3,4)])\n", " elif name == \"prism\":\n", " s, h = r.randint(2, 4), r.randint(2, 4)\n", " bottom = [[c[0]-h,c[1]-s,c[2]], [c[0]-h,c[1]+s//2,c[2]-s], [c[0]-h,c[1]+s//2,c[2]+s]]\n", " top = [[b[0]+2*h, b[1], b[2]] for b in bottom]\n", " v = bottom + top\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,0),(3,4),(4,5),(5,3),(0,3),(1,4),(2,5)])\n", " elif name == \"octahedron\":\n", " s = r.uniform(2, 4)\n", " v = [[c[0]+s,c[1],c[2]],[c[0]-s,c[1],c[2]],[c[0],c[1]+s,c[2]],[c[0],c[1]-s,c[2]],[c[0],c[1],c[2]+s],[c[0],c[1],c[2]-s]]\n", " pts = rasterize_edges(v, [(0,2),(0,3),(0,4),(0,5),(1,2),(1,3),(1,4),(1,5),(2,4),(2,5),(3,4),(3,5)])\n", " elif name == \"pentachoron\":\n", " s = r.uniform(2, 4)\n", " v = [[c[0]+s,c[1],c[2]],[c[0]-s//2,c[1]+s,c[2]],[c[0]-s//2,c[1]-s//2,c[2]+s],[c[0]-s//2,c[1]-s//2,c[2]-s],[c[0],c[1],c[2]]]\n", " pts = rasterize_edges(v, [(i,j) for i in range(5) for j in range(i+1,5)])\n", " elif name == \"wedge\":\n", " s = r.randint(2, 4)\n", " v = [[c[0]-s,c[1]-s,c[2]-s],[c[0]-s,c[1]+s,c[2]-s],[c[0]-s,c[1],c[2]+s],[c[0]+s,c[1]-s,c[2]-s],[c[0]+s,c[1]+s,c[2]-s],[c[0]+s,c[1],c[2]+s]]\n", " pts = rasterize_edges(v, [(0,1),(1,2),(2,0),(3,4),(4,5),(5,3),(0,3),(1,4),(2,5)])\n", " elif name == \"sphere\":\n", " R = r.uniform(2, min(3.5, GZ//2 - 1))\n", " pts = rasterize_sphere(c, R, fill=False)\n", " elif name == \"hemisphere\":\n", " R = r.uniform(2, min(3.5, GZ//2 - 1))\n", " pts = 
rasterize_sphere(c, R, fill=False, half=True)\n", " elif name == \"torus\":\n", " R, rr = r.uniform(3, 5), r.uniform(1, 2)\n", " t = np.linspace(0, 2*np.pi, 40)\n", " p = np.linspace(0, 2*np.pi, 20)\n", " T, P = np.meshgrid(t, p)\n", " pts = np.round(np.column_stack([c[0] + rr*np.sin(P.ravel()), c[1] + (R+rr*np.cos(P.ravel()))*np.cos(T.ravel()), c[2] + (R+rr*np.cos(P.ravel()))*np.sin(T.ravel())])).astype(int)\n", " elif name == \"bowl\":\n", " R = r.uniform(2, 4)\n", " pts = rasterize_sphere(c, R, fill=False)\n", " pts = pts[pts[:, 0] >= c[0]] if len(pts) > 0 else pts\n", " elif name == \"saddle\":\n", " s = r.uniform(2, 4)\n", " Y, X = np.mgrid[-s:s:0.5, -s:s:0.5]\n", " Z = (Y**2 - X**2) / (2*s)\n", " pts = np.round(np.column_stack([c[0] + Z.ravel(), c[1] + Y.ravel(), c[2] + X.ravel()])).astype(int)\n", " elif name == \"capsule\":\n", " R, H = r.uniform(1.5, 3), r.uniform(2, 4)\n", " shell = rasterize_sphere(c, R, fill=False)\n", " body = []\n", " for z in range(max(0, int(c[0]-H//2)), min(GZ, int(c[0]+H//2)+1)):\n", " for y in range(GY):\n", " for x in range(GX):\n", " if abs(np.sqrt((y-c[1])**2 + (x-c[2])**2) - R) < 0.8:\n", " body.append([z, y, x])\n", " pts = np.concatenate([shell, np.array(body) if body else np.zeros((0,3), dtype=int)])\n", " elif name == \"cylinder\":\n", " R, H = r.uniform(2, 4), r.uniform(3, GZ - 2)\n", " pts = []\n", " for z in range(max(0, int(c[0]-H/2)), min(GZ, int(c[0]+H/2)+1)):\n", " for y in range(GY):\n", " for x in range(GX):\n", " d = np.sqrt((y-c[1])**2 + (x-c[2])**2)\n", " if abs(d - R) < 0.8:\n", " pts.append([z, y, x])\n", " pts = np.array(pts) if pts else np.zeros((0,3), dtype=int)\n", " elif name == \"cone\":\n", " R, H = r.uniform(2, 4), r.uniform(3, GZ - 2)\n", " pts = []\n", " for z in range(max(0, int(c[0]-H/2)), min(GZ, int(c[0]+H/2)+1)):\n", " frac = 1 - (z - (c[0]-H/2)) / H\n", " cr = R * frac\n", " for y in range(GY):\n", " for x in range(GX):\n", " d = np.sqrt((y-c[1])**2 + (x-c[2])**2)\n", " if abs(d - cr) < 0.8 
and cr > 0.3:\n", " pts.append([z, y, x])\n", " pts = np.array(pts) if pts else np.zeros((0,3), dtype=int)\n", " elif name == \"channel\":\n", " R = r.uniform(2, 4)\n", " L = r.randint(6, GX - 2)\n", " pts = []\n", " for z in range(GZ):\n", " for x in range(max(0, c[2]-L//2), min(GX, c[2]+L//2)):\n", " for y in range(GY):\n", " d = np.sqrt((z - c[0])**2 + (y - c[1])**2)\n", " if abs(d - R) < 0.8:\n", " pts.append([z, y, x])\n", " pts = np.array(pts) if pts else np.zeros((0,3), dtype=int)\n", " else:\n", " return None\n", " except Exception:\n", " return None\n", "\n", " grid, pts = self._to_grid(pts)\n", " if grid is not None and pts is not None and len(pts) > 0:\n", " return {\"grid\": grid, \"class_idx\": CLASS_TO_IDX[name]}\n", " return None\n", "\n", " def generate_multi(self, n_shapes: int = None) -> Optional[Dict]:\n", " if n_shapes is None:\n", " n_shapes = self.rng.randint(2, 5)\n", " names = list(self.rng.choice(CLASS_NAMES, size=n_shapes, replace=False))\n", " shapes = [s for s in [self.generate(n) for n in names] if s is not None]\n", " if len(shapes) < 2:\n", " return None\n", " grid = np.zeros(GRID_SHAPE, dtype=np.float32)\n", " membership = np.zeros((MACRO_N, NUM_CLASSES), dtype=np.float32)\n", " for s in shapes:\n", " pts = np.argwhere(s[\"grid\"] > 0.5)\n", " grid[pts[:, 0], pts[:, 1], pts[:, 2]] = 1.0\n", " patch_idx = (pts[:, 0]//PATCH_Z) * (MACRO_Y*MACRO_X) + (pts[:, 1]//PATCH_Y) * MACRO_X + (pts[:, 2]//PATCH_X)\n", " np.add.at(membership[:, s[\"class_idx\"]], patch_idx, 1.0)\n", " return {\"grid\": grid, \"membership\": (membership > 0).astype(np.float32), \"n_shapes\": len(shapes)}\n", "\n", "\n", "def _worker(args):\n", " seed, min_s, max_s = args\n", " gen = HierarchicalShapeGenerator(seed)\n", " return gen.generate_multi(gen.rng.randint(min_s, max_s + 1))\n", "\n", "\n", "def generate_dataset(n_samples: int, seed: int = 42, num_workers: int = MAX_WORKERS) -> Dict:\n", " from multiprocessing import Pool\n", " try:\n", " from tqdm import 
def _worker(args):
    # Worker-process entry point: generate one multi-shape sample from
    # a (seed, min_shapes, max_shapes) tuple. May return None on failure.
    seed, min_s, max_s = args
    gen = HierarchicalShapeGenerator(seed)
    return gen.generate_multi(gen.rng.randint(min_s, max_s + 1))


def generate_dataset(n_samples: int, seed: int = 42, num_workers: int = MAX_WORKERS) -> Dict:
    """
    Generate n_samples multi-shape grids in parallel.

    Submits 2x n_samples tasks (generation can fail) and stops as soon as
    n_samples successes arrive; results arrive in completion order via
    imap_unordered, so sample order is nondeterministic across runs.
    If too many tasks fail, fewer than n_samples are returned silently.
    """
    from multiprocessing import Pool
    try:
        from tqdm import tqdm
        use_tqdm = True
    except ImportError:
        use_tqdm = False

    tasks = [(seed * 10000 + i, 2, 4) for i in range(n_samples * 2)]
    grids, memberships, n_shapes = [], [], []

    with Pool(num_workers) as pool:
        pbar = tqdm(total=n_samples, desc="Generating") if use_tqdm else None
        for r in pool.imap_unordered(_worker, tasks):
            if r is not None and len(grids) < n_samples:
                grids.append(r["grid"])
                memberships.append(r["membership"])
                n_shapes.append(r["n_shapes"])
                if pbar: pbar.update(1)
                if len(grids) >= n_samples:
                    break
        if pbar: pbar.close()

    return {"grids": np.array(grids), "memberships": np.array(memberships), "n_shapes": np.array(n_shapes)}


# === Patch Analysis: Two-Tier =================================================

def analyze_local_patches(grids):
    """
    Local patch properties — intrinsic to each patch's voxels.
    No cross-patch information. Computable from raw patch data.

    Returns:
        occupancy: (N, 64) float — mean voxel density
        dims: (N, 64) long — 0-3 (axis extent counting); -1 for empty patches
        curvature: (N, 64) long — 0=rigid, 1=curved, 2=combined
        boundary: (N, 64) float — 1.0 if partial fill (surface patch)
        axis_active: (N, 64, 3) float — which axes have extent > 1
        fill_ratio: (N, 64) float — voxels / bounding_box_volume
    """
    import torch

    if isinstance(grids, np.ndarray):
        grids = torch.from_numpy(grids).float()

    device, N = grids.device, grids.shape[0]
    # Reshape (N, 8, 16, 16) into (N, 64 patches, 2, 4, 4).
    patches = grids.view(N, MACRO_Z, PATCH_Z, MACRO_Y, PATCH_Y, MACRO_X, PATCH_X)
    patches = patches.permute(0, 1, 3, 5, 2, 4, 6).contiguous().view(N, MACRO_N, PATCH_Z, PATCH_Y, PATCH_X)

    occupancy = patches.sum(dim=(2, 3, 4)) / PATCH_VOL
    occ_mask = occupancy > 0.01
    occ = patches > 0.5

    # Per-axis coordinate grids, broadcast against each patch.
    z_c = torch.arange(PATCH_Z, device=device).view(1, 1, PATCH_Z, 1, 1).float()
    y_c = torch.arange(PATCH_Y, device=device).view(1, 1, 1, PATCH_Y, 1).float()
    x_c = torch.arange(PATCH_X, device=device).view(1, 1, 1, 1, PATCH_X).float()
    INF = 1000.0

    # Extent per axis = max occupied coord - min occupied coord; ±INF
    # sentinels make empty patches come out hugely negative (clamped below).
    z_ext = torch.where(occ, z_c.expand_as(patches), torch.full_like(patches, -INF)).amax(dim=(2,3,4)) - torch.where(occ, z_c.expand_as(patches), torch.full_like(patches, INF)).amin(dim=(2,3,4))
    y_ext = torch.where(occ, y_c.expand_as(patches), torch.full_like(patches, -INF)).amax(dim=(2,3,4)) - torch.where(occ, y_c.expand_as(patches), torch.full_like(patches, INF)).amin(dim=(2,3,4))
    x_ext = torch.where(occ, x_c.expand_as(patches), torch.full_like(patches, -INF)).amax(dim=(2,3,4)) - torch.where(occ, x_c.expand_as(patches), torch.full_like(patches, INF)).amin(dim=(2,3,4))

    # Dimensionality = number of axes with extent >= 1 (checked on the
    # descending-sorted extents); empty patches get sentinel -1.
    ext_sorted, _ = torch.stack([z_ext, y_ext, x_ext], dim=-1).clamp(min=0).sort(dim=-1, descending=True)
    dims = torch.zeros(N, MACRO_N, dtype=torch.long, device=device)
    dims = torch.where(ext_sorted[..., 0] >= 1, torch.tensor(1, device=device), dims)
    dims = torch.where(ext_sorted[..., 1] >= 1, torch.tensor(2, device=device), dims)
    dims = torch.where(ext_sorted[..., 2] >= 1, torch.tensor(3, device=device), dims)
    dims = torch.where(~occ_mask, torch.tensor(-1, device=device), dims)

    voxels = patches.sum(dim=(2, 3, 4))
    bb_vol = ((z_ext + 1) * (y_ext + 1) * (x_ext + 1)).clamp(min=1)
    fill_ratio = voxels / bb_vol
    # Dense bounding box -> rigid (0); sparse -> curved (1); else combined (2).
    curvature = torch.where(fill_ratio > 0.6, 0, torch.where(fill_ratio < 0.3, 1, 2)).long()

    boundary = ((occupancy > 0.01) & (occupancy < 0.9)).float()

    axis_active = torch.stack([
        (z_ext.clamp(min=0) >= 1).float(),
        (y_ext.clamp(min=0) >= 1).float(),
        (x_ext.clamp(min=0) >= 1).float(),
    ], dim=-1)

    return {
        "occupancy": occupancy,
        "dims": dims,
        "curvature": curvature,
        "boundary": boundary,
        "axis_active": axis_active,
        "fill_ratio": fill_ratio,
    }
def analyze_structural_patches(grids, local_data):
    """
    Structural patch properties — relational, require neighborhood context.
    Ground truth targets for post-attention heads.

    Returns:
        topology: (N, 64) long — 0=open (<= 3 neighbors), 1=closed (> 3)
        neighbor_count: (N, 64) float — normalized 0-1 (raw count / 6)
        surface_role: (N, 64) long — 0=isolated (0-1), 1=boundary (2-4), 2=interior (5-6)
    """
    import torch
    import torch.nn.functional as F

    if isinstance(grids, np.ndarray):
        grids = torch.from_numpy(grids).float()

    device, N = grids.device, grids.shape[0]
    occ_mask = local_data["occupancy"] > 0.01

    # Count occupied 6-connected macro-patch neighbors via a 3D convolution
    # whose kernel has ones at the six face-adjacent offsets (center stays 0).
    occ_3d = occ_mask.float().view(N, 1, MACRO_Z, MACRO_Y, MACRO_X)
    kernel = torch.zeros(1, 1, 3, 3, 3, device=device)
    for dz, dy, dx in ((1, 1, 0), (1, 1, 2), (1, 0, 1), (1, 2, 1), (0, 1, 1), (2, 1, 1)):
        kernel[0, 0, dz, dy, dx] = 1
    raw_count = F.conv3d(occ_3d, kernel, padding=1).view(N, MACRO_N)

    topology = (raw_count > 3).long()
    neighbor_count = raw_count / 6.0

    # Bucket into 0 (0-1 neighbors), 1 (2-4), 2 (5-6) by summing thresholds.
    surface_role = (raw_count >= 2).long() + (raw_count >= 5).long()

    return {
        "topology": topology,
        "neighbor_count": neighbor_count,
        "surface_role": surface_role,
    }


def analyze_patches_torch(grids):
    """Combined analysis — returns both local and structural properties."""
    local_data = analyze_local_patches(grids)
    struct_data = analyze_structural_patches(grids, local_data)

    import torch
    occupancy = local_data["occupancy"]
    N, device = occupancy.shape[0], occupancy.device

    # Legacy 5-way gate labels: three curvature one-hots + two topology one-hots.
    labels = torch.zeros(N, MACRO_N, NUM_GATES, device=device)
    flags = [
        local_data["curvature"] == 0,
        local_data["curvature"] == 1,
        local_data["curvature"] == 2,
        struct_data["topology"] == 0,
        struct_data["topology"] == 1,
    ]
    for slot, flag in enumerate(flags):
        labels[..., slot] = flag.float()

    return {
        # Local
        "patch_occupancy": occupancy,
        "patch_dims": local_data["dims"],
        "patch_curvature": local_data["curvature"],
        "patch_boundary": local_data["boundary"],
        "patch_axis_active": local_data["axis_active"],
        "patch_fill_ratio": local_data["fill_ratio"],
        # Structural
        "patch_topology": struct_data["topology"],
        "patch_neighbor_count": struct_data["neighbor_count"],
        "patch_surface_role": struct_data["surface_role"],
        # Legacy
        "patch_labels": labels,
    }
"\n", "\n", "# === Dataset ==================================================================\n", "import torch\n", "from torch.utils.data import Dataset\n", "\n", "\n", "class ShapeDataset(Dataset):\n", " def __init__(self, grids, memberships, patch_data):\n", " self.grids = grids\n", " self.memberships = memberships\n", "\n", " # Local\n", " self.patch_occupancy = patch_data[\"patch_occupancy\"]\n", " self.patch_dims = patch_data[\"patch_dims\"]\n", " self.patch_curvature = patch_data[\"patch_curvature\"]\n", " self.patch_boundary = patch_data[\"patch_boundary\"]\n", " self.patch_axis_active = patch_data[\"patch_axis_active\"]\n", " self.patch_fill_ratio = patch_data[\"patch_fill_ratio\"]\n", "\n", " # Structural\n", " self.patch_topology = patch_data[\"patch_topology\"]\n", " self.patch_neighbor_count = patch_data[\"patch_neighbor_count\"]\n", " self.patch_surface_role = patch_data[\"patch_surface_role\"]\n", "\n", " # Legacy\n", " self.patch_labels = patch_data[\"patch_labels\"]\n", "\n", " # Derived global targets\n", " self.patch_shape_count = (memberships > 0).sum(dim=-1).long()\n", " self.global_shapes = (memberships.sum(dim=1) > 0).float()\n", " occ_mask = self.patch_occupancy > 0.01\n", " occ_count = occ_mask.sum(dim=1, keepdim=True).clamp(min=1)\n", " self.global_gates = (self.patch_labels * occ_mask.unsqueeze(-1)).sum(dim=1) / occ_count\n", "\n", " def __len__(self):\n", " return len(self.grids)\n", "\n", " def __getitem__(self, idx):\n", " return {\n", " \"grid\": self.grids[idx],\n", " \"patch_shape_membership\": self.memberships[idx],\n", " \"patch_shape_count\": self.patch_shape_count[idx],\n", " # Local\n", " \"patch_occupancy\": self.patch_occupancy[idx],\n", " \"patch_dims\": self.patch_dims[idx],\n", " \"patch_curvature\": self.patch_curvature[idx],\n", " \"patch_boundary\": self.patch_boundary[idx],\n", " \"patch_axis_active\": self.patch_axis_active[idx],\n", " \"patch_fill_ratio\": self.patch_fill_ratio[idx],\n", " # Structural\n", " 
\"patch_topology\": self.patch_topology[idx],\n", " \"patch_neighbor_count\": self.patch_neighbor_count[idx],\n", " \"patch_surface_role\": self.patch_surface_role[idx],\n", " # Legacy\n", " \"patch_labels\": self.patch_labels[idx],\n", " # Global\n", " \"global_shapes\": self.global_shapes[idx],\n", " \"global_gates\": self.global_gates[idx],\n", " }\n", "\n", "\n", "def collate_fn(batch):\n", " return {k: torch.stack([b[k] for b in batch]) for k in batch[0].keys()}\n", "\n", "\n", "print(f\"✓ Generator ready | Local: {LOCAL_GATE_DIM}d | Structural: {STRUCTURAL_GATE_DIM}d | Total: {TOTAL_GATE_DIM}d\")\n", "\n", "\"\"\"\n", "Superposition Patch Classifier - Two-Tier Gated Transformer\n", "=============================================================\n", "Colab Cell 2 of 3 - depends on Cell 1 (generator.py) namespace.\n", "\n", "Architecture:\n", " voxels → patch_embed → e₀\n", "\n", " Stage 0 (local gates): From raw embeddings, no attention\n", " e₀ → local_dim_head → dim_soft ─┐\n", " e₀ → local_curv_head → curv_soft ─┤ LOCAL_GATE_DIM = 11\n", " e₀ → local_bound_head → bound_soft ─┤\n", " e₀ → local_axis_head → axis_soft ─┘→ local_gates (detached)\n", "\n", " Stage 1 (bootstrap): Attention sees local gates\n", " proj([e₀, local_gates]) → bootstrap_block × N → h\n", "\n", " Stage 1.5 (structural gates): From h, after cross-patch context\n", " h → struct_topo_head → topo_soft ─┐\n", " h → struct_neighbor_head → neighbor_soft ─┤ STRUCTURAL_GATE_DIM = 6\n", " h → struct_role_head → role_soft ─┘→ structural_gates (detached)\n", "\n", " Stage 2 (geometric routing): Both gate tiers\n", " (h, local_gates, structural_gates) → geometric_block × N → h'\n", "\n", " Stage 3 (classification): Gated shape heads\n", " [h', local_gates, structural_gates] → shape_heads\n", "\"\"\"\n", "\n", "import math\n", "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", "\n", "# Cell 1 provides: all constants including LOCAL_GATE_DIM, STRUCTURAL_GATE_DIM, 
# === Patch Embedding ==========================================================

class PatchEmbedding3D(nn.Module):
    """Split a voxel grid into MACRO_N patches, project each to `patch_dim`,
    and add a learned projection of the patch's normalized (z, y, x) position.
    """

    def __init__(self, patch_dim=64):
        super().__init__()
        # One linear projection shared by all patches (PATCH_VOL voxels each).
        self.proj = nn.Linear(PATCH_VOL, patch_dim)
        # Normalized macro-grid coordinates in [0, 1) per axis.
        pz = torch.arange(MACRO_Z).float() / MACRO_Z
        py = torch.arange(MACRO_Y).float() / MACRO_Y
        px = torch.arange(MACRO_X).float() / MACRO_X
        pos = torch.stack(torch.meshgrid(pz, py, px, indexing='ij'), dim=-1).reshape(MACRO_N, 3)
        self.register_buffer('pos_embed', pos)
        self.pos_proj = nn.Linear(3, patch_dim)

    def forward(self, x):
        # assumes x is (B, MACRO_Z*PATCH_Z, MACRO_Y*PATCH_Y, MACRO_X*PATCH_X) — TODO confirm against Cell 1
        B = x.shape[0]
        patches = x.view(B, MACRO_Z, PATCH_Z, MACRO_Y, PATCH_Y, MACRO_X, PATCH_X)
        # Reorder so each patch's voxels are contiguous, then flatten per patch.
        patches = patches.permute(0, 1, 3, 5, 2, 4, 6).contiguous().view(B, MACRO_N, PATCH_VOL)
        return self.proj(patches) + self.pos_proj(self.pos_embed)


# === Standard Transformer Block ===============================================

class TransformerBlock(nn.Module):
    """Pre-norm transformer block: self-attention + 4x-expansion MLP, both residual."""

    def __init__(self, dim, n_heads, dropout=0.1):
        super().__init__()
        self.attn = nn.MultiheadAttention(dim, n_heads, dropout=dropout, batch_first=True)
        self.ff = nn.Sequential(
            nn.Linear(dim, dim * 4), nn.GELU(), nn.Dropout(dropout),
            nn.Linear(dim * 4, dim), nn.Dropout(dropout)
        )
        self.ln1, self.ln2 = nn.LayerNorm(dim), nn.LayerNorm(dim)

    def forward(self, x):
        # Same normalized tensor is used for query, key, and value.
        x = x + self.attn(self.ln1(x), self.ln1(x), self.ln1(x))[0]
        return x + self.ff(self.ln2(x))


# === Geometric Gated Attention ================================================

class GatedGeometricAttention(nn.Module):
    """
    Multi-head attention with two-tier gate modulation.
    Q, K see both local and structural gates.
    V modulated by combined gate vector.
    Per-head compatibility bias from gate interactions.
    """

    def __init__(self, embed_dim, gate_dim, n_heads, dropout=0.1):
        super().__init__()
        self.embed_dim = embed_dim
        self.n_heads = n_heads
        self.head_dim = embed_dim // n_heads

        # Q, K from [h, all_gates]
        self.q_proj = nn.Linear(embed_dim + gate_dim, embed_dim)
        self.k_proj = nn.Linear(embed_dim + gate_dim, embed_dim)
        self.v_proj = nn.Linear(embed_dim, embed_dim)

        # Per-head gate compatibility
        self.gate_q = nn.Linear(gate_dim, n_heads)
        self.gate_k = nn.Linear(gate_dim, n_heads)

        # Value modulation by gates
        self.v_gate = nn.Sequential(nn.Linear(gate_dim, embed_dim), nn.Sigmoid())

        self.out_proj = nn.Linear(embed_dim, embed_dim)
        self.attn_drop = nn.Dropout(dropout)
        self.scale = math.sqrt(self.head_dim)

    def forward(self, h, gate_features):
        """h: (B, N, embed_dim); gate_features: (B, N, gate_dim). Returns (B, N, embed_dim)."""
        B, N, _ = h.shape
        hg = torch.cat([h, gate_features], dim=-1)
        Q = self.q_proj(hg).view(B, N, self.n_heads, self.head_dim).transpose(1, 2)
        K = self.k_proj(hg).view(B, N, self.n_heads, self.head_dim).transpose(1, 2)

        # Values come from h only, scaled elementwise by a sigmoid of the gates.
        V = self.v_proj(h)
        V = (V * self.v_gate(gate_features)).view(B, N, self.n_heads, self.head_dim).transpose(1, 2)

        content_scores = (Q @ K.transpose(-2, -1)) / self.scale
        # Additive per-head bias: outer product of per-patch gate projections.
        gq = self.gate_q(gate_features)
        gk = self.gate_k(gate_features)
        compat = torch.einsum('bih,bjh->bhij', gq, gk)

        attn = F.softmax(content_scores + compat, dim=-1)
        attn = self.attn_drop(attn)

        out = (attn @ V).transpose(1, 2).reshape(B, N, self.embed_dim)
        return self.out_proj(out)


class GeometricTransformerBlock(nn.Module):
    """Pre-norm residual block wrapping GatedGeometricAttention + MLP."""

    def __init__(self, embed_dim, gate_dim, n_heads, dropout=0.1, ff_mult=4):
        super().__init__()
        self.ln1 = nn.LayerNorm(embed_dim)
        self.attn = GatedGeometricAttention(embed_dim, gate_dim, n_heads, dropout)
        self.ln2 = nn.LayerNorm(embed_dim)
        self.ff = nn.Sequential(
            nn.Linear(embed_dim, embed_dim * ff_mult), nn.GELU(), nn.Dropout(dropout),
            nn.Linear(embed_dim * ff_mult, embed_dim), nn.Dropout(dropout)
        )

    def forward(self, h, gate_features):
        # Note: gate_features bypass the LayerNorm — only h is normalized.
        h = h + self.attn(self.ln1(h), gate_features)
        h = h + self.ff(self.ln2(h))
        return h


# === Main Classifier ==========================================================

class SuperpositionPatchClassifier(nn.Module):
    """
    Two-tier gated transformer for multi-shape superposition.

    Tier 1 (local): Gates from raw patch embeddings — what IS in this patch
    Tier 2 (structural): Gates from post-attention h — what ROLE this patch plays

    Both tiers feed into geometric attention and classification.
    """

    def __init__(self, embed_dim=128, patch_dim=64, n_bootstrap=2, n_geometric=2,
                 n_heads=4, dropout=0.1):
        super().__init__()
        self.embed_dim = embed_dim

        # Patch embedding
        self.patch_embed = PatchEmbedding3D(patch_dim)

        # === Stage 0: Local encoder + gate heads (pre-attention) ===
        # Shared MLP gives local heads enough capacity to extract
        # dims/curvature/boundary from 32 voxels without cross-patch info
        local_hidden = patch_dim * 2  # 128
        self.local_encoder = nn.Sequential(
            nn.Linear(patch_dim, local_hidden), nn.GELU(), nn.Dropout(dropout),
            nn.Linear(local_hidden, local_hidden), nn.GELU(), nn.Dropout(dropout),
        )
        self.local_dim_head = nn.Linear(local_hidden, NUM_LOCAL_DIMS)
        self.local_curv_head = nn.Linear(local_hidden, NUM_LOCAL_CURVS)
        self.local_bound_head = nn.Linear(local_hidden, NUM_LOCAL_BOUNDARY)
        self.local_axis_head = nn.Linear(local_hidden, NUM_LOCAL_AXES)

        # Project [embedding, local_gates] → embed_dim for bootstrap
        self.proj = nn.Linear(patch_dim + LOCAL_GATE_DIM, embed_dim)

        # === Stage 1: Bootstrap blocks (attention with local gate context) ===
        self.bootstrap_blocks = nn.ModuleList([
            TransformerBlock(embed_dim, n_heads, dropout)
            for _ in range(n_bootstrap)
        ])

        # === Stage 1.5: Structural gate heads (from h, post-attention) ===
        self.struct_topo_head = nn.Linear(embed_dim, NUM_STRUCT_TOPO)
        self.struct_neighbor_head = nn.Linear(embed_dim, NUM_STRUCT_NEIGHBOR)
        self.struct_role_head = nn.Linear(embed_dim, NUM_STRUCT_ROLE)

        # === Stage 2: Geometric gated blocks (see both gate tiers) ===
        self.geometric_blocks = nn.ModuleList([
            GeometricTransformerBlock(embed_dim, TOTAL_GATE_DIM, n_heads, dropout)
            for _ in range(n_geometric)
        ])

        # === Stage 3: Gated classification ===
        gated_dim = embed_dim + TOTAL_GATE_DIM

        self.patch_shape_head = nn.Sequential(
            nn.Linear(gated_dim, embed_dim), nn.GELU(), nn.Dropout(dropout),
            nn.Linear(embed_dim, NUM_CLASSES)
        )

        self.global_pool = nn.Sequential(
            nn.Linear(gated_dim, embed_dim), nn.GELU(),
            nn.Linear(embed_dim, embed_dim)
        )
        self.global_gate_head = nn.Linear(embed_dim, NUM_GATES)
        self.global_shape_head = nn.Linear(embed_dim, NUM_CLASSES)

    def forward(self, x):
        """Run all four stages; returns a dict of logits and features (see keys below)."""
        # === Raw patch embedding ===
        e = self.patch_embed(x)  # (B, 64, patch_dim)

        # === Stage 0: Local gates from raw embedding via local encoder ===
        e_local = self.local_encoder(e)  # (B, 64, local_hidden)
        local_dim_logits = self.local_dim_head(e_local)
        local_curv_logits = self.local_curv_head(e_local)
        local_bound_logits = self.local_bound_head(e_local)
        local_axis_logits = self.local_axis_head(e_local)

        # NOTE(review): the Cell-2 architecture docstring says gates are
        # "(detached)", but no .detach() appears here — gradients flow from the
        # attention stages back into the gate heads. Confirm which is intended.
        local_gates = torch.cat([
            F.softmax(local_dim_logits, dim=-1),
            F.softmax(local_curv_logits, dim=-1),
            torch.sigmoid(local_bound_logits),
            torch.sigmoid(local_axis_logits),
        ], dim=-1)  # (B, 64, 11)

        # === Stage 1: Bootstrap with local gate context ===
        h = self.proj(torch.cat([e, local_gates], dim=-1))
        for blk in self.bootstrap_blocks:
            h = blk(h)

        # === Stage 1.5: Structural gates from h (after cross-patch context) ===
        struct_topo_logits = self.struct_topo_head(h)
        struct_neighbor_logits = self.struct_neighbor_head(h)
        struct_role_logits = self.struct_role_head(h)

        structural_gates = torch.cat([
            F.softmax(struct_topo_logits, dim=-1),
            torch.sigmoid(struct_neighbor_logits),
            F.softmax(struct_role_logits, dim=-1),
        ], dim=-1)  # (B, 64, 6)

        # === Combined gate vector ===
        all_gates = torch.cat([local_gates, structural_gates], dim=-1)  # (B, 64, 17)

        # === Stage 2: Geometric gated transformer ===
        for blk in self.geometric_blocks:
            h = blk(h, all_gates)

        # === Stage 3: Classification from gated representations ===
        h_gated = torch.cat([h, all_gates], dim=-1)
        shape_logits = self.patch_shape_head(h_gated)
        g = self.global_pool(h_gated.mean(dim=1))

        return {
            # Local gate predictions (Stage 0)
            "local_dim_logits": local_dim_logits,
            "local_curv_logits": local_curv_logits,
            "local_bound_logits": local_bound_logits,
            "local_axis_logits": local_axis_logits,

            # Structural gate predictions (Stage 1.5)
            "struct_topo_logits": struct_topo_logits,
            "struct_neighbor_logits": struct_neighbor_logits,
            "struct_role_logits": struct_role_logits,

            # Shape predictions (Stage 3)
            "patch_shape_logits": shape_logits,
            "patch_features": h,
            "global_features": g,
            "global_gates": self.global_gate_head(g),
            "global_shapes": self.global_shape_head(g),
        }


# === Loss =====================================================================

class SuperpositionLoss(nn.Module):
    """Weighted sum of local-gate, structural-gate, per-patch shape, and global
    losses. Per-patch terms are averaged over occupied patches only
    (occupancy > 0.01); global terms are unmasked.
    """

    def __init__(self, local_weight=1.0, struct_weight=1.0, shape_weight=1.0,
                 global_weight=0.5):
        super().__init__()
        self.lw, self.sw, self.shw, self.gw = local_weight, struct_weight, shape_weight, global_weight

    def forward(self, outputs, targets):
        """outputs: dict from SuperpositionPatchClassifier; targets: batch dict
        from the dataset. Returns dict with 'total' plus per-term losses."""
        occ_mask = targets["patch_occupancy"] > 0.01
        n_occ = occ_mask.sum().clamp(min=1)

        # --- Local gate losses ---
        # Targets are clamped into the valid class range before cross-entropy;
        # presumably out-of-range codes exist in the data — TODO confirm in Cell 1.
        dim_loss = F.cross_entropy(
            outputs["local_dim_logits"].view(-1, NUM_LOCAL_DIMS),
            targets["patch_dims"].clamp(0, NUM_LOCAL_DIMS - 1).view(-1),
            reduction='none').view_as(occ_mask)
        curv_loss = F.cross_entropy(
            outputs["local_curv_logits"].view(-1, NUM_LOCAL_CURVS),
            targets["patch_curvature"].clamp(0, NUM_LOCAL_CURVS - 1).view(-1),
            reduction='none').view_as(occ_mask)
        bound_loss = F.binary_cross_entropy_with_logits(
            outputs["local_bound_logits"].squeeze(-1),
            targets["patch_boundary"],
            reduction='none')
        axis_loss = F.binary_cross_entropy_with_logits(
            outputs["local_axis_logits"],
            targets["patch_axis_active"],
            reduction='none').mean(dim=-1)

        local_loss = ((dim_loss + curv_loss + bound_loss + axis_loss) * occ_mask.float()).sum() / n_occ

        # --- Structural gate losses ---
        topo_loss = F.cross_entropy(
            outputs["struct_topo_logits"].view(-1, NUM_STRUCT_TOPO),
            targets["patch_topology"].clamp(0, NUM_STRUCT_TOPO - 1).view(-1),
            reduction='none').view_as(occ_mask)
        # Neighbor count regressed through a sigmoid — assumes the target is
        # normalized to [0, 1]; TODO confirm against the generator.
        neighbor_loss = F.mse_loss(
            torch.sigmoid(outputs["struct_neighbor_logits"].squeeze(-1)),
            targets["patch_neighbor_count"],
            reduction='none')
        role_loss = F.cross_entropy(
            outputs["struct_role_logits"].view(-1, NUM_STRUCT_ROLE),
            targets["patch_surface_role"].clamp(0, NUM_STRUCT_ROLE - 1).view(-1),
            reduction='none').view_as(occ_mask)

        struct_loss = ((topo_loss + neighbor_loss + role_loss) * occ_mask.float()).sum() / n_occ

        # --- Shape losses ---
        # Multi-label BCE per patch, averaged over classes, masked to occupied patches.
        shape_loss = F.binary_cross_entropy_with_logits(
            outputs["patch_shape_logits"],
            targets["patch_shape_membership"],
            reduction='none').mean(dim=-1)
        shape_loss = (shape_loss * occ_mask.float()).sum() / n_occ

        # --- Global losses ---
        global_gate_loss = F.binary_cross_entropy_with_logits(outputs["global_gates"], targets["global_gates"])
        global_shape_loss = F.binary_cross_entropy_with_logits(outputs["global_shapes"], targets["global_shapes"])
        global_loss = global_gate_loss + global_shape_loss

        total = self.lw * local_loss + self.sw * struct_loss + self.shw * shape_loss + self.gw * global_loss

        return {
            "total": total,
            "local": local_loss,
            "struct": struct_loss,
            "shape": shape_loss,
            "global": global_loss,
        }


print("✓ Model ready (Two-Tier Gated Transformer)")
"""
TextVAE - Text-to-Geometric-Latent VAE
========================================
Maps flan-t5-small pooled embeddings (512d) → (8, 16, 16) latent space
matching the adapted FLUX VAE latent shape fed to the geometric model.

The hypothesis: text descriptions contain enough structural information
that when projected into geometric patch space, the geometric model
produces differentiated gate vectors across categories.

Architecture:
  T5 pooled (512d) → Encoder → μ,σ (bottleneck_dim) → Decoder → (8, 16, 16)

Training target: reconstruct the adapted FLUX latent from its paired prompt.
"""

import torch
import torch.nn as nn
import torch.nn.functional as F
from typing import Tuple, Dict


class TextVAE(nn.Module):
    """
    Minimal VAE: text embedding → geometric latent space.

    Input: (B, 512) flan-t5-small pooled embedding
    Output: (B, 8, 16, 16) adapted FLUX-compatible latent
    """

    def __init__(
        self,
        text_dim: int = 512,
        latent_channels: int = 8,
        spatial_size: int = 16,
        bottleneck_dim: int = 256,
        hidden_dim: int = 1024,
        num_layers: int = 3,
        dropout: float = 0.1,
        kl_weight: float = 1e-4,
    ):
        super().__init__()
        self.text_dim = text_dim
        self.latent_channels = latent_channels
        self.spatial_size = spatial_size
        self.bottleneck_dim = bottleneck_dim
        self.kl_weight = kl_weight

        # Flattened size of the target latent: 8*16*16 = 2048.
        self.flat_latent = latent_channels * spatial_size * spatial_size

        # ── Encoder: text_dim → hidden stack ──
        widths = [text_dim] + [hidden_dim] * (num_layers - 1)
        encoder_stack = []
        for d_in, d_out in zip(widths, widths[1:]):
            encoder_stack += [
                nn.Linear(d_in, d_out),
                nn.LayerNorm(d_out),
                nn.GELU(),
                nn.Dropout(dropout),
            ]
        self.encoder = nn.Sequential(*encoder_stack)

        # μ and log-variance heads off the final hidden layer.
        self.fc_mu = nn.Linear(hidden_dim, bottleneck_dim)
        self.fc_logvar = nn.Linear(hidden_dim, bottleneck_dim)

        # ── Decoder: bottleneck → flat_latent, then reshaped in decode() ──
        dec_widths = [bottleneck_dim] + [hidden_dim] * (num_layers - 1) + [self.flat_latent]
        decoder_stack = []
        last_linear = len(dec_widths) - 2
        for i, (d_in, d_out) in enumerate(zip(dec_widths, dec_widths[1:])):
            decoder_stack.append(nn.Linear(d_in, d_out))
            if i < last_linear:  # the output layer stays linear
                decoder_stack += [nn.LayerNorm(d_out), nn.GELU(), nn.Dropout(dropout)]
        self.decoder = nn.Sequential(*decoder_stack)

        self._init_weights()

    def _init_weights(self):
        """Kaiming-normal weights and zero biases for every linear layer."""
        for module in self.modules():
            if isinstance(module, nn.Linear):
                nn.init.kaiming_normal_(module.weight, nonlinearity='relu')
                if module.bias is not None:
                    nn.init.zeros_(module.bias)

    def encode(self, text_emb: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
        """(B, 512) → μ, log_var each (B, bottleneck_dim)"""
        hidden = self.encoder(text_emb)
        return self.fc_mu(hidden), self.fc_logvar(hidden)

    def reparameterize(self, mu: torch.Tensor, logvar: torch.Tensor) -> torch.Tensor:
        """Sample z ~ N(μ, σ²) while training; return μ deterministically in eval."""
        if self.training:
            return mu + torch.randn_like(logvar) * torch.exp(0.5 * logvar)
        return mu

    def decode(self, z: torch.Tensor) -> torch.Tensor:
        """(B, bottleneck_dim) → (B, 8, 16, 16)"""
        flat = self.decoder(z)
        return flat.view(-1, self.latent_channels, self.spatial_size, self.spatial_size)

    def forward(self, text_emb: torch.Tensor) -> Dict[str, torch.Tensor]:
        """
        Full forward pass.

        Args:
            text_emb: (B, 512) T5 pooled embedding

        Returns:
            dict with 'reconstruction', 'mu', 'logvar', 'z'
        """
        mu, logvar = self.encode(text_emb)
        z = self.reparameterize(mu, logvar)
        return {
            "reconstruction": self.decode(z),
            "mu": mu,
            "logvar": logvar,
            "z": z,
        }

    def compute_loss(
        self,
        text_emb: torch.Tensor,
        target_latent: torch.Tensor,
    ) -> Dict[str, torch.Tensor]:
        """
        Compute VAE loss against target FLUX adapted latent.

        Args:
            text_emb: (B, 512) T5 pooled
            target_latent: (B, 8, 16, 16) adapted FLUX latent

        Returns:
            dict with 'loss', 'recon_loss', 'kl_loss', 'reconstruction'
        """
        out = self.forward(text_emb)

        # Reconstruction: MSE in latent space.
        recon_loss = F.mse_loss(out["reconstruction"], target_latent)

        # KL divergence of q(z|x) from the unit Gaussian prior.
        kl_loss = -0.5 * torch.mean(
            1 + out["logvar"] - out["mu"].pow(2) - out["logvar"].exp()
        )

        return {
            "loss": recon_loss + self.kl_weight * kl_loss,
            "recon_loss": recon_loss,
            "kl_loss": kl_loss,
            "reconstruction": out["reconstruction"],
        }

    @torch.no_grad()
    def generate_latent(self, text_emb: torch.Tensor) -> torch.Tensor:
        """Inference: text → geometric latent. Returns (B, 8, 16, 16)."""
        self.eval()  # note: switches the module to eval mode as a side effect
        mu, _ = self.encode(text_emb)
        return self.decode(mu)


# ── Model info ────────────────────────────────────────────────────────────────

def count_params(model):
    """Total number of parameters (trainable or not) in `model`."""
    return sum(p.numel() for p in model.parameters())

if __name__ == "__main__":
    model = TextVAE()
    print(f"TextVAE: {count_params(model):,} parameters")
    x = torch.randn(4, 512)
    out = model(x)
    print(f"  Input: {x.shape}")
    print(f"  Output: {out['reconstruction'].shape}")
    print(f"  μ: {out['mu'].shape}")
    print(f"  z: {out['z'].shape}")

# ## train

"""
Train TextVAE
==============
Phase 1: Pre-extract T5 embeddings + FLUX adapted latents (paired targets)
Phase 2: Train TextVAE to reconstruct latents from text
Phase 3: Analyze — feed TextVAE outputs through geometric model

Requires Cell 1 (generator.py) and Cell 2 (model.py) from the geometric
classifier in the Colab namespace.
"""

import os, gc, json
import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader, TensorDataset
from tqdm import tqdm
import numpy as np
from collections import Counter

# ── Config ────────────────────────────────────────────────────────────────────
DATASET_ID = "AbstractPhil/synthetic-characters"
SUBSET = "schnell_full_1_512"
VAE_REPO = "black-forest-labs/FLUX.1-schnell"
T5_MODEL_ID = "google/flan-t5-small"
GEO_REPO = "AbstractPhil/grid-geometric-multishape"
GEO_FILE = "checkpoint_v10/best_model_epoch200.pt"

DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
IMAGE_SIZE = 128
FLUX_SCALE = 0.3611

VAE_BATCH = 128
T5_BATCH = 256

EPOCHS = 50
LR = 1e-3
TRAIN_BATCH = 512
VAL_SPLIT = 0.02
SAVE_DIR = "/content/geovae_proto/text_vae"
CACHE_DIR = "/content/geovae_cache"

TEXT_DIM = 512
BOTTLENECK_DIM = 256
HIDDEN_DIM = 1024
NUM_LAYERS = 3
KL_WEIGHT = 1e-4


# ══════════════════════════════════════════════════════════════════════════════
# Phase 1: Pre-extract paired data
# ══════════════════════════════════════════════════════════════════════════════

def extract_t5_embeddings(ds):
    """Mean-pool flan-t5-small encoder states per prompt; cached to disk.

    Returns a (N, 512) CPU tensor aligned with ds row order.
    """
    from transformers import T5EncoderModel, T5Tokenizer

    cache_path = os.path.join(CACHE_DIR, "t5_embeddings.pt")
    if os.path.exists(cache_path):
        print(f"✓ Cached T5: {cache_path}")
        return torch.load(cache_path, map_location="cpu")

    os.makedirs(CACHE_DIR, exist_ok=True)
    print("Loading flan-t5-small...")
    tokenizer = T5Tokenizer.from_pretrained(T5_MODEL_ID)
    encoder = T5EncoderModel.from_pretrained(T5_MODEL_ID).to(DEVICE).eval()

    prompts = ds["prompt"]
    all_embs = []

    for start in tqdm(range(0, len(prompts), T5_BATCH), desc="T5 encode"):
        end = min(start + T5_BATCH, len(prompts))
        tokens = tokenizer(
            prompts[start:end], padding=True, truncation=True,
            max_length=128, return_tensors="pt",
        ).to(DEVICE)
        with torch.no_grad():
            out = encoder(**tokens)
        # Attention-mask-weighted mean over the sequence dimension.
        mask = tokens["attention_mask"].unsqueeze(-1).float()
        pooled = (out.last_hidden_state * mask).sum(1) / mask.sum(1)
        all_embs.append(pooled.cpu())

    embs = torch.cat(all_embs)
    torch.save(embs, cache_path)
    print(f"✓ T5 embeddings: {embs.shape}")

    del encoder, tokenizer; gc.collect(); torch.cuda.empty_cache()
    return embs


def extract_flux_latents(ds):
    """Encode dataset images with the FLUX VAE into (N, 8, 16, 16); cached to disk.

    16-channel latents are pooled pairwise down to 8; non-16x16 spatial maps are
    bilinearly resized.
    """
    from diffusers import AutoencoderKL
    from torchvision import transforms

    cache_path = os.path.join(CACHE_DIR, "flux_latents.pt")
    if os.path.exists(cache_path):
        print(f"✓ Cached FLUX latents: {cache_path}")
        return torch.load(cache_path, map_location="cpu")

    os.makedirs(CACHE_DIR, exist_ok=True)
    print("Loading FLUX VAE (fp16)...")
    vae = AutoencoderKL.from_pretrained(
        VAE_REPO, subfolder="vae", torch_dtype=torch.float16,
    ).to(DEVICE).eval()

    img_transform = transforms.Compose([
        transforms.Resize((IMAGE_SIZE, IMAGE_SIZE)),
        transforms.ToTensor(),
        transforms.Normalize([0.5]*3, [0.5]*3),
    ])

    class ImgDS(torch.utils.data.Dataset):
        def __init__(self, hf_ds): self.ds = hf_ds
        def __len__(self): return len(self.ds)
        def __getitem__(self, i):
            # Best-effort: a corrupt image becomes a black frame rather than
            # killing the worker. `except Exception` (not bare `except:`) so
            # KeyboardInterrupt/SystemExit still propagate.
            try: return img_transform(self.ds[i]["image"].convert("RGB"))
            except Exception: return torch.zeros(3, IMAGE_SIZE, IMAGE_SIZE)

    loader = DataLoader(ImgDS(ds), batch_size=VAE_BATCH, shuffle=False,
                        num_workers=8, pin_memory=True, prefetch_factor=4,
                        persistent_workers=True)

    all_latents = []
    for batch in tqdm(loader, desc="VAE encode"):
        batch = batch.to(DEVICE, non_blocking=True)
        # torch.autocast replaces the deprecated torch.cuda.amp.autocast().
        with torch.no_grad(), torch.autocast("cuda"):
            z = vae.encode(batch.half()).latent_dist.sample() * FLUX_SCALE
        z = z.float()
        B, C, H, W = z.shape
        if H != 16 or W != 16:
            z = F.interpolate(z, size=(16, 16), mode='bilinear', align_corners=False)
        if C == 16:
            z = z.view(B, 8, 2, 16, 16).mean(dim=2)
        all_latents.append(z.cpu())

    latents = torch.cat(all_latents)
    torch.save(latents, cache_path)
    print(f"✓ FLUX latents: {latents.shape}")

    del vae; gc.collect(); torch.cuda.empty_cache()
    return latents


# ══════════════════════════════════════════════════════════════════════════════
# Phase 2: Train TextVAE
# ══════════════════════════════════════════════════════════════════════════════

def train_text_vae(t5_embs, flux_latents, categories):
    """Train TextVAE on (embedding → latent) pairs; saves best checkpoint,
    history, and the val split (with categories) under SAVE_DIR.
    Returns the trained model (last-epoch weights, not necessarily best).
    """
    os.makedirs(SAVE_DIR, exist_ok=True)
    N = t5_embs.shape[0]

    n_val = max(int(N * VAL_SPLIT), 256)
    perm = torch.randperm(N)
    val_idx, train_idx = perm[:n_val], perm[n_val:]
    val_cats = [categories[i] for i in val_idx.tolist()]

    train_loader = DataLoader(
        TensorDataset(t5_embs[train_idx], flux_latents[train_idx]),
        batch_size=TRAIN_BATCH, shuffle=True, num_workers=4, pin_memory=True)
    val_loader = DataLoader(
        TensorDataset(t5_embs[val_idx], flux_latents[val_idx]),
        batch_size=TRAIN_BATCH, shuffle=False, num_workers=2, pin_memory=True)

    model = TextVAE(
        text_dim=TEXT_DIM, bottleneck_dim=BOTTLENECK_DIM,
        hidden_dim=HIDDEN_DIM, num_layers=NUM_LAYERS, kl_weight=KL_WEIGHT,
    ).to(DEVICE)
    print(f"TextVAE: {sum(p.numel() for p in model.parameters()):,} params")
    print(f"Train: {len(train_idx)}, Val: {len(val_idx)}")

    optimizer = torch.optim.AdamW(model.parameters(), lr=LR, weight_decay=1e-5)
    scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=EPOCHS)

    best_val = float("inf")
    history = []

    for epoch in range(1, EPOCHS + 1):
        model.train()
        tr, tk, tt, n = 0, 0, 0, 0
        for tb, lb in train_loader:
            tb, lb = tb.to(DEVICE, non_blocking=True), lb.to(DEVICE, non_blocking=True)
            out = model.compute_loss(tb, lb)
            optimizer.zero_grad(); out["loss"].backward()
            torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
            optimizer.step()
            bs = tb.shape[0]
            tr += out["recon_loss"].item() * bs
            tk += out["kl_loss"].item() * bs
            tt += out["loss"].item() * bs
            n += bs
        scheduler.step()

        model.eval()
        vr, vk, vt, nv = 0, 0, 0, 0
        with torch.no_grad():
            for tb, lb in val_loader:
                tb, lb = tb.to(DEVICE, non_blocking=True), lb.to(DEVICE, non_blocking=True)
                out = model.compute_loss(tb, lb)
                bs = tb.shape[0]
                vr += out["recon_loss"].item() * bs
                vk += out["kl_loss"].item() * bs
                vt += out["loss"].item() * bs
                nv += bs

        tr, tk, tt = tr/n, tk/n, tt/n
        vr, vk, vt = vr/nv, vk/nv, vt/nv

        history.append({"epoch": epoch, "train_recon": tr, "train_kl": tk,
                        "val_recon": vr, "val_kl": vk, "val_total": vt})

        tag = " ✓ BEST" if vt < best_val else ""
        print(f"  Epoch {epoch:3d} train={tt:.5f}(r={tr:.5f} kl={tk:.2f}) val={vt:.5f}(r={vr:.5f}){tag}")

        if vt < best_val:
            best_val = vt
            torch.save({"model_state_dict": model.state_dict(), "epoch": epoch,
                        "val_loss": vt, "config": {"text_dim": TEXT_DIM,
                        "bottleneck_dim": BOTTLENECK_DIM, "hidden_dim": HIDDEN_DIM,
                        "num_layers": NUM_LAYERS, "kl_weight": KL_WEIGHT}},
                       os.path.join(SAVE_DIR, "best_model.pt"))

    with open(os.path.join(SAVE_DIR, "training_history.json"), "w") as f:
        json.dump(history, f, indent=2)

    # Save val split for analysis
    torch.save({"val_idx": val_idx, "val_categories": val_cats,
                "t5_embs_val": t5_embs[val_idx]},
               os.path.join(SAVE_DIR, "val_data.pt"))

    print(f"\n✓ Best val loss: {best_val:.5f}")
    return model


# ══════════════════════════════════════════════════════════════════════════════
# Phase 3: Analyze — text patches through geometric model
# ══════════════════════════════════════════════════════════════════════════════

def load_geometric_model():
    """Download and restore the frozen SuperpositionPatchClassifier checkpoint."""
    from huggingface_hub import hf_hub_download
    path = hf_hub_download(repo_id=GEO_REPO, filename=GEO_FILE)
    ckpt = torch.load(path, map_location=DEVICE, weights_only=False)
    cfg = ckpt["config"]
    model = SuperpositionPatchClassifier(
        embed_dim=cfg["embed_dim"], patch_dim=cfg["patch_dim"],
        n_bootstrap=cfg["n_bootstrap"], n_geometric=cfg["n_geometric"],
        n_heads=cfg["n_heads"], dropout=0.0,
    ).to(DEVICE).eval()
    model.load_state_dict(ckpt["model_state_dict"])
    print(f"✓ Geometric model (epoch {ckpt['epoch']})")
    return model


@torch.no_grad()
def extract_geometric_features(latents, geo_model, batch_size=256):
    """Run latents through the geometric model in batches.

    Returns (gate_vectors, patch_features) on CPU; gate layout mirrors the
    classifier's local+structural concatenation.
    """
    all_gates, all_patch = [], []
    for s in range(0, latents.shape[0], batch_size):
        batch = latents[s:s+batch_size].to(DEVICE)
        out = geo_model(batch)
        local = torch.cat([
            F.softmax(out["local_dim_logits"], dim=-1),
            F.softmax(out["local_curv_logits"], dim=-1),
            torch.sigmoid(out["local_bound_logits"]),
            torch.sigmoid(out["local_axis_logits"]),
        ], dim=-1)
        struct = torch.cat([
            F.softmax(out["struct_topo_logits"], dim=-1),
            torch.sigmoid(out["struct_neighbor_logits"]),
            F.softmax(out["struct_role_logits"], dim=-1),
        ], dim=-1)
        all_gates.append(torch.cat([local, struct], dim=-1).cpu())
        all_patch.append(out["patch_features"].cpu())
    return torch.cat(all_gates), torch.cat(all_patch)


def compute_discriminability(features_flat, categories, min_n=10):
    """Compute per-category discriminability on GPU.

    Discriminability per category = mean within-category cosine similarity
    minus size-weighted mean cross-category similarity; positive means the
    category's features cluster.
    """
    cat_counts = Counter(categories)
    cat_names = sorted([c for c, n in cat_counts.items() if n >= min_n])
    cat_array = np.array(categories)
    cat_indices = {c: torch.from_numpy(np.where(cat_array == c)[0]).long() for c in cat_names}
    sizes = {c: len(cat_indices[c]) for c in cat_names}
    total = sum(sizes.values())

    feat_gpu = F.normalize(features_flat.to(DEVICE), dim=-1)
    n = len(cat_names)
    mat = np.zeros((n, n))

    for i, ci in enumerate(cat_names):
        fi = feat_gpu[cat_indices[ci]]
        for j, cj in enumerate(cat_names):
            if j < i:
                mat[i, j] = mat[j, i]; continue
            fj = feat_gpu[cat_indices[cj]]
            sim = fi @ fj.T
            if i == j:
                # Exclude the diagonal (self-similarity is always 1).
                mask = ~torch.eye(sim.shape[0], dtype=torch.bool, device=DEVICE)
                val = sim[mask].mean().item() if mask.sum() > 0 else 1.0
            else:
                val = sim.mean().item()
            mat[i, j] = val
            if j > i: mat[j, i] = val

    del feat_gpu; torch.cuda.empty_cache()

    disc = {}
    for i, ci in enumerate(cat_names):
        cross_sum, cross_n = 0.0, 0
        for j, cj in enumerate(cat_names):
            if i != j:
                cross_sum += mat[i, j] * sizes[cj]
                cross_n += sizes[cj]
        disc[ci] = float(mat[i, i] - cross_sum / max(cross_n, 1))

    overall = sum(disc[c] * sizes[c] / total for c in cat_names)
    return {"overall": overall, "per_category": disc, "matrix": mat.tolist(),
            "cat_names": cat_names, "sizes": sizes}


def analyze(text_vae_model):
    """The Rosetta Stone test: text-only geometric differentiation."""
    print("\n" + "=" * 60)
    print("Phase 3: Geometric Analysis of Text Patches")
    print("=" * 60)

    # Load val data
    val_data = torch.load(os.path.join(SAVE_DIR, "val_data.pt"),
                          map_location="cpu", weights_only=False)
    t5_val = val_data["t5_embs_val"]
    categories = val_data["val_categories"]
    N = t5_val.shape[0]
    print(f"Analyzing {N} text-derived patches")

    # Text → TextVAE → patches
    print("Generating patches from text...")
    text_vae_model.eval()
    text_latents = text_vae_model.generate_latent(t5_val.to(DEVICE)).cpu()
    print(f"  Patches: {text_latents.shape}")

    # Patches → Geometric model → features
    print("Extracting geometric features...")
    geo_model = load_geometric_model()
    gates, patch_feats = extract_geometric_features(text_latents, geo_model)
    del geo_model; gc.collect(); torch.cuda.empty_cache()

    # Discriminability over three feature views
    results = {}
    for name, feat in [("gate_vectors", gates.flatten(1)),
                       ("patch_feat", patch_feats.flatten(1)),
                       ("global_feat", patch_feats.mean(dim=1))]:
        print(f"\n  {name}:")
        d = compute_discriminability(feat, categories)
        results[name] = d
        print(f"    Overall discriminability: {d['overall']:+.4f}")

        sorted_d = sorted(d["per_category"].items(), key=lambda x: -x[1])
        for c, v in sorted_d[:5]:
            print(f"      {c:30s} {v:+.4f} (n={d['sizes'].get(c, 0)})")
        print(f"      ...")
        for c, v in sorted_d[-3:]:
            print(f"      {c:30s} {v:+.4f} (n={d['sizes'].get(c, 0)})")

    # Save
    eval_dir = os.path.join(SAVE_DIR, "eval")
    os.makedirs(eval_dir, exist_ok=True)
    with open(os.path.join(eval_dir, "text_geometric_results.json"), "w") as f:
        json.dump(results, f, indent=2)

    torch.save({"gates": gates, "patch_feats": patch_feats,
                "categories": categories, "text_latents": text_latents},
               os.path.join(eval_dir, "text_geometric_features.pt"))

    print(f"\n✓ Results saved to {eval_dir}")
    return results


# ══════════════════════════════════════════════════════════════════════════════
# Main
# ══════════════════════════════════════════════════════════════════════════════

def run():
    """End-to-end pipeline: load dataset → extract pairs → train → analyze."""
    from datasets import load_dataset

    print(f"Loading {DATASET_ID} / {SUBSET}...")
    ds = load_dataset(DATASET_ID, SUBSET, split="train")
    print(f"✓ {len(ds)} samples")
    categories = ds["generator_type"]

    # Phase 1
    t5_embs = extract_t5_embeddings(ds)
    flux_latents = extract_flux_latents(ds)

    # Phase 2
    model = train_text_vae(t5_embs, flux_latents, categories)

    # Free FLUX latents — not needed for analysis
    del flux_latents; gc.collect()

    # Phase 3
    results = analyze(model)
    return results

results = run()
"stdout", "text": [ "Loading AbstractPhil/synthetic-characters / schnell_full_1_512...\n" ] }, { "output_type": "display_data", "data": { "text/plain": [ "Resolving data files: 0%| | 0/66 [00:00 Tuple[torch.Tensor, torch.Tensor]:\n", " h = self.encoder(text_emb)\n", " return self.fc_mu(h), self.fc_logvar(h)\n", "\n", " def reparameterize(self, mu: torch.Tensor, logvar: torch.Tensor) -> torch.Tensor:\n", " if self.training:\n", " std = torch.exp(0.5 * logvar)\n", " return mu + torch.randn_like(std) * std\n", " return mu\n", "\n", " def decode(self, z: torch.Tensor) -> torch.Tensor:\n", " flat = self.decoder(z)\n", " return flat.view(-1, self.latent_channels, self.spatial_size, self.spatial_size)\n", "\n", " def forward(self, text_emb: torch.Tensor) -> Dict[str, torch.Tensor]:\n", " mu, logvar = self.encode(text_emb)\n", " z = self.reparameterize(mu, logvar)\n", " recon = self.decode(z)\n", " return {\"reconstruction\": recon, \"mu\": mu, \"logvar\": logvar, \"z\": z}\n", "\n", " def compute_loss(self, text_emb: torch.Tensor, target: torch.Tensor) -> Dict[str, torch.Tensor]:\n", " out = self.forward(text_emb)\n", " recon_loss = F.mse_loss(out[\"reconstruction\"], target)\n", " kl_loss = -0.5 * torch.mean(1 + out[\"logvar\"] - out[\"mu\"].pow(2) - out[\"logvar\"].exp())\n", " loss = recon_loss + self.kl_weight * kl_loss\n", " return {\"loss\": loss, \"recon_loss\": recon_loss, \"kl_loss\": kl_loss, \"reconstruction\": out[\"reconstruction\"]}\n", "\n", " @torch.no_grad()\n", " def generate_latent(self, text_emb: torch.Tensor) -> torch.Tensor:\n", " self.eval()\n", " mu, _ = self.encode(text_emb)\n", " return self.decode(mu)" ], "metadata": { "id": "d_2bj8PfQHZA" }, "execution_count": null, "outputs": [] }, { "cell_type": "markdown", "source": [ "## train" ], "metadata": { "id": "CBmex2-0QI7c" } }, { "cell_type": "code", "source": [ "\"\"\"\n", "Train BertVAE\n", "==============\n", "Same pipeline as TextVAE but with bert-base-uncased [CLS] embeddings (768d).\n", 
def extract_bert_embeddings(ds):
    """Encode every prompt in ds with bert-base-uncased [CLS] vectors.

    Results are cached on disk; a warm cache short-circuits the model load.

    Returns:
        (N, 768) float tensor on CPU.
    """
    from transformers import BertModel, BertTokenizer

    cache_path = os.path.join(CACHE_DIR, "bert_embeddings.pt")
    if os.path.exists(cache_path):
        print(f"✓ Cached BERT: {cache_path}")
        return torch.load(cache_path, map_location="cpu")

    os.makedirs(CACHE_DIR, exist_ok=True)
    print("Loading bert-base-uncased...")
    tokenizer = BertTokenizer.from_pretrained(BERT_MODEL_ID)
    encoder = BertModel.from_pretrained(BERT_MODEL_ID).to(DEVICE).eval()

    prompts = ds["prompt"]
    chunks = []

    for lo in tqdm(range(0, len(prompts), BERT_BATCH), desc="BERT encode"):
        hi = min(lo + BERT_BATCH, len(prompts))
        tokens = tokenizer(
            prompts[lo:hi], padding=True, truncation=True,
            max_length=128, return_tensors="pt",
        ).to(DEVICE)
        with torch.no_grad():
            # [CLS] token (sequence position 0) as the sentence embedding
            cls_emb = encoder(**tokens).last_hidden_state[:, 0, :]
        chunks.append(cls_emb.cpu())

    embs = torch.cat(chunks)
    torch.save(embs, cache_path)
    print(f"✓ BERT embeddings: {embs.shape}")

    del encoder, tokenizer
    gc.collect()
    torch.cuda.empty_cache()
    return embs
shuffle=False,\n", " num_workers=8, pin_memory=True, prefetch_factor=4,\n", " persistent_workers=True)\n", "\n", " all_latents = []\n", " for batch in tqdm(loader, desc=\"VAE encode\"):\n", " batch = batch.to(DEVICE, non_blocking=True)\n", " with torch.no_grad(), torch.cuda.amp.autocast():\n", " z = vae.encode(batch.half()).latent_dist.sample() * FLUX_SCALE\n", " z = z.float()\n", " B, C, H, W = z.shape\n", " if H != 16 or W != 16:\n", " z = F.interpolate(z, size=(16, 16), mode='bilinear', align_corners=False)\n", " if C == 16:\n", " z = z.view(B, 8, 2, 16, 16).mean(dim=2)\n", " all_latents.append(z.cpu())\n", "\n", " latents = torch.cat(all_latents)\n", " torch.save(latents, cache_path)\n", " print(f\"✓ FLUX latents: {latents.shape}\")\n", "\n", " del vae; gc.collect(); torch.cuda.empty_cache()\n", " return latents\n", "\n", "\n", "# ══════════════════════════════════════════════════════════════════════════════\n", "# Phase 2: Train BertVAE\n", "# ══════════════════════════════════════════════════════════════════════════════\n", "\n", "def train_bert_vae(bert_embs, flux_latents, categories):\n", "\n", " os.makedirs(SAVE_DIR, exist_ok=True)\n", " N = bert_embs.shape[0]\n", "\n", " n_val = max(int(N * VAL_SPLIT), 256)\n", " perm = torch.randperm(N)\n", " val_idx, train_idx = perm[:n_val], perm[n_val:]\n", " val_cats = [categories[i] for i in val_idx.tolist()]\n", "\n", " train_loader = DataLoader(\n", " TensorDataset(bert_embs[train_idx], flux_latents[train_idx]),\n", " batch_size=TRAIN_BATCH, shuffle=True, num_workers=4, pin_memory=True)\n", " val_loader = DataLoader(\n", " TensorDataset(bert_embs[val_idx], flux_latents[val_idx]),\n", " batch_size=TRAIN_BATCH, shuffle=False, num_workers=2, pin_memory=True)\n", "\n", " model = BertVAE(\n", " text_dim=TEXT_DIM, bottleneck_dim=BOTTLENECK_DIM,\n", " hidden_dim=HIDDEN_DIM, num_layers=NUM_LAYERS, kl_weight=KL_WEIGHT,\n", " ).to(DEVICE)\n", " print(f\"BertVAE: {sum(p.numel() for p in model.parameters()):,} params\")\n", " 
def train_bert_vae(bert_embs, flux_latents, categories):
    """Train a BertVAE mapping BERT [CLS] embeddings to FLUX latent grids.

    Splits off a validation set, trains with AdamW + cosine LR decay and
    gradient clipping, checkpoints the best model by total validation loss,
    and persists the validation split (indices, categories, embeddings)
    so Phase 3 can run standalone.

    Args:
        bert_embs: (N, TEXT_DIM) text embeddings.
        flux_latents: (N, 8, 16, 16) target latents, row-aligned with bert_embs.
        categories: length-N list of labels, saved alongside the val split.

    Returns:
        The model in its final-epoch state (best epoch is on disk).
    """
    os.makedirs(SAVE_DIR, exist_ok=True)
    N = bert_embs.shape[0]

    n_val = max(int(N * VAL_SPLIT), 256)
    # FIX: seed the permutation so the train/val split — and therefore the
    # saved val_data.pt that Phase 3 analyzes — is reproducible across runs.
    perm = torch.randperm(N, generator=torch.Generator().manual_seed(0))
    val_idx, train_idx = perm[:n_val], perm[n_val:]
    val_cats = [categories[i] for i in val_idx.tolist()]

    train_loader = DataLoader(
        TensorDataset(bert_embs[train_idx], flux_latents[train_idx]),
        batch_size=TRAIN_BATCH, shuffle=True, num_workers=4, pin_memory=True)
    val_loader = DataLoader(
        TensorDataset(bert_embs[val_idx], flux_latents[val_idx]),
        batch_size=TRAIN_BATCH, shuffle=False, num_workers=2, pin_memory=True)

    model = BertVAE(
        text_dim=TEXT_DIM, bottleneck_dim=BOTTLENECK_DIM,
        hidden_dim=HIDDEN_DIM, num_layers=NUM_LAYERS, kl_weight=KL_WEIGHT,
    ).to(DEVICE)
    print(f"BertVAE: {sum(p.numel() for p in model.parameters()):,} params")
    print(f"Train: {len(train_idx)}, Val: {len(val_idx)}")

    optimizer = torch.optim.AdamW(model.parameters(), lr=LR, weight_decay=1e-5)
    scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=EPOCHS)

    best_val = float("inf")
    history = []

    for epoch in range(1, EPOCHS + 1):
        # ── Train ────────────────────────────────────────────────────────
        model.train()
        tr, tk, tt, n = 0, 0, 0, 0
        for tb, lb in train_loader:
            tb, lb = tb.to(DEVICE, non_blocking=True), lb.to(DEVICE, non_blocking=True)
            out = model.compute_loss(tb, lb)
            optimizer.zero_grad(); out["loss"].backward()
            torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
            optimizer.step()
            bs = tb.shape[0]
            # Sample-weighted running sums: batches can differ in size.
            tr += out["recon_loss"].item() * bs
            tk += out["kl_loss"].item() * bs
            tt += out["loss"].item() * bs
            n += bs
        scheduler.step()

        # ── Validate ─────────────────────────────────────────────────────
        model.eval()
        vr, vk, vt, nv = 0, 0, 0, 0
        with torch.no_grad():
            for tb, lb in val_loader:
                tb, lb = tb.to(DEVICE, non_blocking=True), lb.to(DEVICE, non_blocking=True)
                out = model.compute_loss(tb, lb)
                bs = tb.shape[0]
                vr += out["recon_loss"].item() * bs
                vk += out["kl_loss"].item() * bs
                vt += out["loss"].item() * bs
                nv += bs

        tr, tk, tt = tr/n, tk/n, tt/n
        vr, vk, vt = vr/nv, vk/nv, vt/nv

        history.append({"epoch": epoch, "train_recon": tr, "train_kl": tk,
                        "val_recon": vr, "val_kl": vk, "val_total": vt})

        tag = " ✓ BEST" if vt < best_val else ""
        print(f"  Epoch {epoch:3d} train={tt:.5f}(r={tr:.5f} kl={tk:.2f}) val={vt:.5f}(r={vr:.5f}){tag}")

        if vt < best_val:
            best_val = vt
            torch.save({"model_state_dict": model.state_dict(), "epoch": epoch,
                        "val_loss": vt, "config": {"text_dim": TEXT_DIM,
                        "bottleneck_dim": BOTTLENECK_DIM, "hidden_dim": HIDDEN_DIM,
                        "num_layers": NUM_LAYERS, "kl_weight": KL_WEIGHT}},
                       os.path.join(SAVE_DIR, "best_model.pt"))

    with open(os.path.join(SAVE_DIR, "training_history.json"), "w") as f:
        json.dump(history, f, indent=2)

    # Persist the validation split for the standalone Phase-3 analysis.
    torch.save({"val_idx": val_idx, "val_categories": val_cats,
                "bert_embs_val": bert_embs[val_idx]},
               os.path.join(SAVE_DIR, "val_data.pt"))

    print(f"\n✓ Best val loss: {best_val:.5f}")
    return model
" all_gates.append(torch.cat([local, struct], dim=-1).cpu())\n", " all_patch.append(out[\"patch_features\"].cpu())\n", " return torch.cat(all_gates), torch.cat(all_patch)\n", "\n", "\n", "def compute_discriminability(features_flat, categories, min_n=10):\n", " cat_counts = Counter(categories)\n", " cat_names = sorted([c for c, n in cat_counts.items() if n >= min_n])\n", " cat_array = np.array(categories)\n", " cat_indices = {c: torch.from_numpy(np.where(cat_array == c)[0]).long() for c in cat_names}\n", " sizes = {c: len(cat_indices[c]) for c in cat_names}\n", " total = sum(sizes.values())\n", "\n", " feat_gpu = F.normalize(features_flat.to(DEVICE), dim=-1)\n", " n = len(cat_names)\n", " mat = np.zeros((n, n))\n", "\n", " for i, ci in enumerate(cat_names):\n", " fi = feat_gpu[cat_indices[ci]]\n", " for j, cj in enumerate(cat_names):\n", " if j < i:\n", " mat[i, j] = mat[j, i]; continue\n", " fj = feat_gpu[cat_indices[cj]]\n", " sim = fi @ fj.T\n", " if i == j:\n", " mask = ~torch.eye(sim.shape[0], dtype=torch.bool, device=DEVICE)\n", " val = sim[mask].mean().item() if mask.sum() > 0 else 1.0\n", " else:\n", " val = sim.mean().item()\n", " mat[i, j] = val\n", " if j > i: mat[j, i] = val\n", "\n", " del feat_gpu; torch.cuda.empty_cache()\n", "\n", " disc = {}\n", " for i, ci in enumerate(cat_names):\n", " cross_sum, cross_n = 0.0, 0\n", " for j, cj in enumerate(cat_names):\n", " if i != j:\n", " cross_sum += mat[i, j] * sizes[cj]\n", " cross_n += sizes[cj]\n", " disc[ci] = float(mat[i, i] - cross_sum / max(cross_n, 1))\n", "\n", " overall = sum(disc[c] * sizes[c] / total for c in cat_names)\n", " return {\"overall\": overall, \"per_category\": disc, \"matrix\": mat.tolist(),\n", " \"cat_names\": cat_names, \"sizes\": sizes}\n", "\n", "\n", "def analyze(bert_vae_model):\n", " print(\"\\n\" + \"=\" * 60)\n", " print(\"Phase 3: BERT Geometric Analysis\")\n", " print(\"=\" * 60)\n", "\n", " val_data = torch.load(os.path.join(SAVE_DIR, \"val_data.pt\"),\n", " 
def run():
    """BERT pipeline driver: load dataset, extract features, train, analyze."""
    from datasets import load_dataset

    print(f"Loading {DATASET_ID} / {SUBSET}...")
    ds = load_dataset(DATASET_ID, SUBSET, split="train")
    print(f"✓ {len(ds)} samples")
    categories = ds["generator_type"]

    # Phase 1: cached extraction of embeddings and latents
    bert_embs = extract_bert_embeddings(ds)
    flux_latents = extract_flux_latents(ds)

    # Phase 2: fit the BERT → latent VAE
    model = train_bert_vae(bert_embs, flux_latents, categories)

    # Training targets are no longer needed once the VAE is fit
    del flux_latents
    gc.collect()

    # Phase 3: geometric analysis on the saved validation split
    return analyze(model)

results = run()
"f8aa47961fa64655b1258697a896826b", "f36c100ec8be42acbda64e61e8ec843c", "07b07954cbc84fcb95630911960665b0", "19b37d811f804a888b9a421cbe814f45", "610830e6733b46eda52e94143b85b9c0", "1db0a35340144968b3d5cb6e15822b43", "bd13afbac48a4ffb9d2d07bb160e694d" ] }, "id": "BpqcYQPKQsIc", "outputId": "ba5c5133-8fc3-4e25-e9cb-b948466fe3f4" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Loading AbstractPhil/synthetic-characters / schnell_full_1_512...\n" ] }, { "output_type": "display_data", "data": { "text/plain": [ "Resolving data files: 0%| | 0/66 [00:00, ,\n", ", etc.) — the same categories we're measuring discriminability on.\n", "\n", "Input: (B, 768) beatrix pooled embedding\n", "Output: (B, 8, 16, 16) geometric patch space\n", "\"\"\"\n", "\n", "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", "from typing import Tuple, Dict\n", "\n", "\n", "class BeatrixVAE(nn.Module):\n", "\n", " def __init__(\n", " self,\n", " text_dim: int = 768,\n", " latent_channels: int = 8,\n", " spatial_size: int = 16,\n", " bottleneck_dim: int = 256,\n", " hidden_dim: int = 1024,\n", " num_layers: int = 3,\n", " dropout: float = 0.1,\n", " kl_weight: float = 1e-4,\n", " ):\n", " super().__init__()\n", " self.text_dim = text_dim\n", " self.latent_channels = latent_channels\n", " self.spatial_size = spatial_size\n", " self.bottleneck_dim = bottleneck_dim\n", " self.kl_weight = kl_weight\n", " self.flat_latent = latent_channels * spatial_size * spatial_size\n", "\n", " enc_layers = []\n", " dims = [text_dim] + [hidden_dim] * (num_layers - 1)\n", " for i in range(len(dims) - 1):\n", " enc_layers.extend([\n", " nn.Linear(dims[i], dims[i + 1]),\n", " nn.LayerNorm(dims[i + 1]),\n", " nn.GELU(),\n", " nn.Dropout(dropout),\n", " ])\n", " self.encoder = nn.Sequential(*enc_layers)\n", " self.fc_mu = nn.Linear(hidden_dim, bottleneck_dim)\n", " self.fc_logvar = nn.Linear(hidden_dim, bottleneck_dim)\n", "\n", " dec_layers = 
[]\n", " dec_dims = [bottleneck_dim] + [hidden_dim] * (num_layers - 1) + [self.flat_latent]\n", " for i in range(len(dec_dims) - 1):\n", " dec_layers.append(nn.Linear(dec_dims[i], dec_dims[i + 1]))\n", " if i < len(dec_dims) - 2:\n", " dec_layers.extend([\n", " nn.LayerNorm(dec_dims[i + 1]),\n", " nn.GELU(),\n", " nn.Dropout(dropout),\n", " ])\n", " self.decoder = nn.Sequential(*dec_layers)\n", " self._init_weights()\n", "\n", " def _init_weights(self):\n", " for m in self.modules():\n", " if isinstance(m, nn.Linear):\n", " nn.init.kaiming_normal_(m.weight, nonlinearity='relu')\n", " if m.bias is not None:\n", " nn.init.zeros_(m.bias)\n", "\n", " def encode(self, text_emb: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:\n", " h = self.encoder(text_emb)\n", " return self.fc_mu(h), self.fc_logvar(h)\n", "\n", " def reparameterize(self, mu: torch.Tensor, logvar: torch.Tensor) -> torch.Tensor:\n", " if self.training:\n", " std = torch.exp(0.5 * logvar)\n", " return mu + torch.randn_like(std) * std\n", " return mu\n", "\n", " def decode(self, z: torch.Tensor) -> torch.Tensor:\n", " flat = self.decoder(z)\n", " return flat.view(-1, self.latent_channels, self.spatial_size, self.spatial_size)\n", "\n", " def forward(self, text_emb: torch.Tensor) -> Dict[str, torch.Tensor]:\n", " mu, logvar = self.encode(text_emb)\n", " z = self.reparameterize(mu, logvar)\n", " recon = self.decode(z)\n", " return {\"reconstruction\": recon, \"mu\": mu, \"logvar\": logvar, \"z\": z}\n", "\n", " def compute_loss(self, text_emb: torch.Tensor, target: torch.Tensor) -> Dict[str, torch.Tensor]:\n", " out = self.forward(text_emb)\n", " recon_loss = F.mse_loss(out[\"reconstruction\"], target)\n", " kl_loss = -0.5 * torch.mean(1 + out[\"logvar\"] - out[\"mu\"].pow(2) - out[\"logvar\"].exp())\n", " loss = recon_loss + self.kl_weight * kl_loss\n", " return {\"loss\": loss, \"recon_loss\": recon_loss, \"kl_loss\": kl_loss, \"reconstruction\": out[\"reconstruction\"]}\n", "\n", " 
@torch.no_grad()\n", " def generate_latent(self, text_emb: torch.Tensor) -> torch.Tensor:\n", " self.eval()\n", " mu, _ = self.encode(text_emb)\n", " return self.decode(mu)" ], "metadata": { "id": "cUy8OvGQSZjM" }, "execution_count": null, "outputs": [] }, { "cell_type": "markdown", "source": [ "## trainer" ], "metadata": { "id": "lpk23DI2SXWW" } }, { "cell_type": "code", "source": [ "\"\"\"\n", "Train BeatrixVAE\n", "=================\n", "bert-beatrix-2048 (nomic_bert, 768d) → geometric patches.\n", "\n", "Beatrix was explicitly trained on categorical tokens (, ,\n", ", etc.) with 2B+ samples. The hypothesis: a model pretrained to\n", "categorize these exact structural concepts should produce *better*\n", "geometric differentiation than generic BERT or T5.\n", "\n", "Requires trust_remote_code=True for nomic_bert architecture.\n", "Requires Cell 1 (generator.py) and Cell 2 (model.py) from geometric classifier.\n", "\"\"\"\n", "\n", "import os, gc, json\n", "import torch\n", "import torch.nn.functional as F\n", "from torch.utils.data import DataLoader, TensorDataset\n", "from tqdm import tqdm\n", "import numpy as np\n", "from collections import Counter\n", "\n", "# ── Config ────────────────────────────────────────────────────────────────────\n", "\n", "DATASET_ID = \"AbstractPhil/synthetic-characters\"\n", "SUBSET = \"schnell_full_1_512\"\n", "VAE_REPO = \"black-forest-labs/FLUX.1-schnell\"\n", "BEATRIX_MODEL = \"AbstractPhil/bert-beatrix-2048\"\n", "GEO_REPO = \"AbstractPhil/grid-geometric-multishape\"\n", "GEO_FILE = \"checkpoint_v10/best_model_epoch200.pt\"\n", "\n", "DEVICE = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n", "IMAGE_SIZE = 128\n", "FLUX_SCALE = 0.3611\n", "\n", "VAE_BATCH = 128\n", "BEATRIX_BATCH = 256\n", "\n", "EPOCHS = 50\n", "LR = 1e-3\n", "TRAIN_BATCH = 512\n", "VAL_SPLIT = 0.02\n", "SAVE_DIR = \"/content/geovae_proto/beatrix_vae\"\n", "CACHE_DIR = \"/content/geovae_cache\"\n", "\n", "TEXT_DIM = 768 # nomic_bert hidden size\n", 
def extract_beatrix_embeddings(ds):
    """Mean-pooled bert-beatrix-2048 embeddings for every prompt in ds (cached).

    Pools last_hidden_state over the sequence, weighted by the attention
    mask so padding tokens do not contribute.

    Returns:
        (N, TEXT_DIM) float tensor on CPU.

    Raises:
        ValueError: if the loaded encoder's hidden size != TEXT_DIM.
    """
    from transformers import AutoModel, AutoTokenizer

    cache_path = os.path.join(CACHE_DIR, "beatrix_embeddings.pt")
    if os.path.exists(cache_path):
        print(f"✓ Cached Beatrix: {cache_path}")
        return torch.load(cache_path, map_location="cpu")

    os.makedirs(CACHE_DIR, exist_ok=True)
    print(f"Loading {BEATRIX_MODEL} (trust_remote_code)...")
    tokenizer = AutoTokenizer.from_pretrained(BEATRIX_MODEL, trust_remote_code=True)
    encoder = AutoModel.from_pretrained(BEATRIX_MODEL, trust_remote_code=True).to(DEVICE).eval()

    # FIX: runtime configuration must not be guarded by `assert` — asserts
    # are stripped under `python -O`. Raise a real exception instead.
    hidden = encoder.config.hidden_size
    print(f"  Hidden size: {hidden}")
    if hidden != TEXT_DIM:
        raise ValueError(f"Expected {TEXT_DIM}, got {hidden}")

    prompts = ds["prompt"]
    all_embs = []

    for start in tqdm(range(0, len(prompts), BEATRIX_BATCH), desc="Beatrix encode"):
        end = min(start + BEATRIX_BATCH, len(prompts))
        tokens = tokenizer(
            prompts[start:end], padding=True, truncation=True,
            max_length=512, return_tensors="pt",
        ).to(DEVICE)
        with torch.no_grad():
            out = encoder(**tokens)
        # Attention-mask-weighted mean pool over the sequence dimension.
        hidden_states = out.last_hidden_state
        mask = tokens["attention_mask"].unsqueeze(-1).float()
        pooled = (hidden_states * mask).sum(1) / mask.sum(1).clamp(min=1e-9)
        all_embs.append(pooled.cpu())

    embs = torch.cat(all_embs)
    torch.save(embs, cache_path)
    print(f"✓ Beatrix embeddings: {embs.shape}")

    del encoder, tokenizer
    gc.collect()
    torch.cuda.empty_cache()
    return embs
def train_beatrix_vae(beatrix_embs, flux_latents, categories):
    """Train a BeatrixVAE mapping pooled Beatrix embeddings to FLUX latents.

    Splits off a validation set, trains with AdamW + cosine LR decay and
    gradient clipping, checkpoints the best model by total validation loss,
    and persists the validation split for the Phase-3 analysis.

    Args:
        beatrix_embs: (N, TEXT_DIM) text embeddings.
        flux_latents: (N, 8, 16, 16) target latents, row-aligned with beatrix_embs.
        categories: length-N list of labels, saved alongside the val split.

    Returns:
        The model in its final-epoch state (best epoch is on disk).
    """
    os.makedirs(SAVE_DIR, exist_ok=True)
    N = beatrix_embs.shape[0]

    n_val = max(int(N * VAL_SPLIT), 256)
    # FIX: seed the permutation so the train/val split — and the saved
    # val_data.pt that Phase 3 analyzes — is reproducible across runs.
    perm = torch.randperm(N, generator=torch.Generator().manual_seed(0))
    val_idx, train_idx = perm[:n_val], perm[n_val:]
    val_cats = [categories[i] for i in val_idx.tolist()]

    train_loader = DataLoader(
        TensorDataset(beatrix_embs[train_idx], flux_latents[train_idx]),
        batch_size=TRAIN_BATCH, shuffle=True, num_workers=4, pin_memory=True)
    val_loader = DataLoader(
        TensorDataset(beatrix_embs[val_idx], flux_latents[val_idx]),
        batch_size=TRAIN_BATCH, shuffle=False, num_workers=2, pin_memory=True)

    model = BeatrixVAE(
        text_dim=TEXT_DIM, bottleneck_dim=BOTTLENECK_DIM,
        hidden_dim=HIDDEN_DIM, num_layers=NUM_LAYERS, kl_weight=KL_WEIGHT,
    ).to(DEVICE)
    print(f"BeatrixVAE: {sum(p.numel() for p in model.parameters()):,} params")
    print(f"Train: {len(train_idx)}, Val: {len(val_idx)}")

    optimizer = torch.optim.AdamW(model.parameters(), lr=LR, weight_decay=1e-5)
    scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=EPOCHS)

    best_val = float("inf")
    history = []

    for epoch in range(1, EPOCHS + 1):
        # ── Train ────────────────────────────────────────────────────────
        model.train()
        tr, tk, tt, n = 0, 0, 0, 0
        for tb, lb in train_loader:
            tb, lb = tb.to(DEVICE, non_blocking=True), lb.to(DEVICE, non_blocking=True)
            out = model.compute_loss(tb, lb)
            optimizer.zero_grad(); out["loss"].backward()
            torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
            optimizer.step()
            bs = tb.shape[0]
            # Sample-weighted running sums: batches can differ in size.
            tr += out["recon_loss"].item() * bs
            tk += out["kl_loss"].item() * bs
            tt += out["loss"].item() * bs
            n += bs
        scheduler.step()

        # ── Validate ─────────────────────────────────────────────────────
        model.eval()
        vr, vk, vt, nv = 0, 0, 0, 0
        with torch.no_grad():
            for tb, lb in val_loader:
                tb, lb = tb.to(DEVICE, non_blocking=True), lb.to(DEVICE, non_blocking=True)
                out = model.compute_loss(tb, lb)
                bs = tb.shape[0]
                vr += out["recon_loss"].item() * bs
                vk += out["kl_loss"].item() * bs
                vt += out["loss"].item() * bs
                nv += bs

        tr, tk, tt = tr/n, tk/n, tt/n
        vr, vk, vt = vr/nv, vk/nv, vt/nv

        history.append({"epoch": epoch, "train_recon": tr, "train_kl": tk,
                        "val_recon": vr, "val_kl": vk, "val_total": vt})

        tag = " ✓ BEST" if vt < best_val else ""
        print(f"  Epoch {epoch:3d} train={tt:.5f}(r={tr:.5f} kl={tk:.2f}) val={vt:.5f}(r={vr:.5f}){tag}")

        if vt < best_val:
            best_val = vt
            torch.save({"model_state_dict": model.state_dict(), "epoch": epoch,
                        "val_loss": vt, "config": {"text_dim": TEXT_DIM,
                        "bottleneck_dim": BOTTLENECK_DIM, "hidden_dim": HIDDEN_DIM,
                        "num_layers": NUM_LAYERS, "kl_weight": KL_WEIGHT}},
                       os.path.join(SAVE_DIR, "best_model.pt"))

    with open(os.path.join(SAVE_DIR, "training_history.json"), "w") as f:
        json.dump(history, f, indent=2)

    # Persist the validation split for the standalone Phase-3 analysis.
    torch.save({"val_idx": val_idx, "val_categories": val_cats,
                "beatrix_embs_val": beatrix_embs[val_idx]},
               os.path.join(SAVE_DIR, "val_data.pt"))

    print(f"\n✓ Best val loss: {best_val:.5f}")
    return model
def load_geometric_model():
    """Download the pretrained geometric patch classifier from the HF Hub.

    Architecture hyperparameters are read from the checkpoint's "config"
    entry so the model is rebuilt exactly as trained (dropout disabled for
    inference). Returns the model on DEVICE in eval mode.
    """
    from huggingface_hub import hf_hub_download
    path = hf_hub_download(repo_id=GEO_REPO, filename=GEO_FILE)
    # weights_only=False because the checkpoint stores a plain config dict
    # alongside the tensors. NOTE(review): with this flag, only load
    # checkpoints from trusted repositories (arbitrary-code-execution risk).
    ckpt = torch.load(path, map_location=DEVICE, weights_only=False)
    cfg = ckpt["config"]
    model = SuperpositionPatchClassifier(
        embed_dim=cfg["embed_dim"], patch_dim=cfg["patch_dim"],
        n_bootstrap=cfg["n_bootstrap"], n_geometric=cfg["n_geometric"],
        n_heads=cfg["n_heads"], dropout=0.0,
    ).to(DEVICE).eval()
    model.load_state_dict(ckpt["model_state_dict"])
    print(f"✓ Geometric model (epoch {ckpt['epoch']})")
    return model


@torch.no_grad()
def extract_geometric_features(latents, geo_model, batch_size=256):
    """Run latents through the geometric model and collect its outputs.

    Args:
        latents: tensor of latents, batched along dim 0.
        geo_model: model whose forward returns a dict of logit heads plus
            a "patch_features" tensor.
        batch_size: inference mini-batch size.

    Returns:
        (gates, patch_features), both on CPU and concatenated over batches.
        `gates` concatenates, along the last dim, the softmaxed categorical
        heads (dim, curvature, topology, role) and the sigmoided binary
        heads (boundary, axis, neighbor).
    """
    all_gates, all_patch = [], []
    for s in range(0, latents.shape[0], batch_size):
        batch = latents[s:s+batch_size].to(DEVICE)
        out = geo_model(batch)
        # Local geometric heads: categorical → softmax, binary → sigmoid.
        local = torch.cat([
            F.softmax(out["local_dim_logits"], dim=-1),
            F.softmax(out["local_curv_logits"], dim=-1),
            torch.sigmoid(out["local_bound_logits"]),
            torch.sigmoid(out["local_axis_logits"]),
        ], dim=-1)
        # Structural heads.
        struct = torch.cat([
            F.softmax(out["struct_topo_logits"], dim=-1),
            torch.sigmoid(out["struct_neighbor_logits"]),
            F.softmax(out["struct_role_logits"], dim=-1),
        ], dim=-1)
        all_gates.append(torch.cat([local, struct], dim=-1).cpu())
        all_patch.append(out["patch_features"].cpu())
    return torch.cat(all_gates), torch.cat(all_patch)


def compute_discriminability(features_flat, categories, min_n=10):
    """Measure how well features separate categories via cosine similarity.

    Builds the symmetric category-by-category matrix of mean pairwise cosine
    similarities; a category's discriminability is its mean within-category
    similarity (self-pairs excluded) minus the size-weighted mean of its
    cross-category similarities. Higher is better.

    Args:
        features_flat: (N, D) feature tensor, one row per sample.
        categories: length-N sequence of category labels.
        min_n: categories with fewer than this many samples are excluded.

    Returns:
        dict with "overall" (size-weighted mean discriminability),
        "per_category", the similarity "matrix" (nested lists, JSON-safe),
        "cat_names" (sorted) and "sizes".
    """
    cat_counts = Counter(categories)
    cat_names = sorted([c for c, n in cat_counts.items() if n >= min_n])
    cat_array = np.array(categories)
    cat_indices = {c: torch.from_numpy(np.where(cat_array == c)[0]).long() for c in cat_names}
    sizes = {c: len(cat_indices[c]) for c in cat_names}
    total = sum(sizes.values())

    # L2-normalize once so the dot products below are cosine similarities.
    feat_gpu = F.normalize(features_flat.to(DEVICE), dim=-1)
    n = len(cat_names)
    mat = np.zeros((n, n))

    for i, ci in enumerate(cat_names):
        fi = feat_gpu[cat_indices[ci]]
        for j, cj in enumerate(cat_names):
            if j < i:
                # Lower triangle was already filled symmetrically below.
                mat[i, j] = mat[j, i]; continue
            fj = feat_gpu[cat_indices[cj]]
            sim = fi @ fj.T
            if i == j:
                # Within-category: mask out the self-similarity diagonal;
                # fall back to 1.0 for a single-sample category.
                mask = ~torch.eye(sim.shape[0], dtype=torch.bool, device=DEVICE)
                val = sim[mask].mean().item() if mask.sum() > 0 else 1.0
            else:
                val = sim.mean().item()
            mat[i, j] = val
            if j > i: mat[j, i] = val

    del feat_gpu; torch.cuda.empty_cache()

    # within-category similarity minus size-weighted cross-category similarity.
    disc = {}
    for i, ci in enumerate(cat_names):
        cross_sum, cross_n = 0.0, 0
        for j, cj in enumerate(cat_names):
            if i != j:
                cross_sum += mat[i, j] * sizes[cj]
                cross_n += sizes[cj]
        disc[ci] = float(mat[i, i] - cross_sum / max(cross_n, 1))

    overall = sum(disc[c] * sizes[c] / total for c in cat_names)
    return {"overall": overall, "per_category": disc, "matrix": mat.tolist(),
            "cat_names": cat_names, "sizes": sizes}
(\"global_feat\", patch_feats.mean(dim=1))]:\n", " print(f\"\\n {name}:\")\n", " d = compute_discriminability(feat, categories)\n", " results[name] = d\n", " print(f\" Overall discriminability: {d['overall']:+.4f}\")\n", "\n", " sorted_d = sorted(d[\"per_category\"].items(), key=lambda x: -x[1])\n", " for c, v in sorted_d[:5]:\n", " print(f\" {c:30s} {v:+.4f} (n={d['sizes'].get(c, 0)})\")\n", " print(f\" ...\")\n", " for c, v in sorted_d[-3:]:\n", " print(f\" {c:30s} {v:+.4f} (n={d['sizes'].get(c, 0)})\")\n", "\n", " eval_dir = os.path.join(SAVE_DIR, \"eval\")\n", " os.makedirs(eval_dir, exist_ok=True)\n", " with open(os.path.join(eval_dir, \"beatrix_geometric_results.json\"), \"w\") as f:\n", " json.dump(results, f, indent=2)\n", " torch.save({\"gates\": gates, \"patch_feats\": patch_feats,\n", " \"categories\": categories, \"text_latents\": text_latents},\n", " os.path.join(eval_dir, \"beatrix_geometric_features.pt\"))\n", "\n", " print(f\"\\n✓ Results saved to {eval_dir}\")\n", " return results\n", "\n", "\n", "# ══════════════════════════════════════════════════════════════════════════════\n", "# Main\n", "# ══════════════════════════════════════════════════════════════════════════════\n", "\n", "def run():\n", " from datasets import load_dataset\n", "\n", " print(f\"Loading {DATASET_ID} / {SUBSET}...\")\n", " ds = load_dataset(DATASET_ID, SUBSET, split=\"train\")\n", " print(f\"✓ {len(ds)} samples\")\n", " categories = ds[\"generator_type\"]\n", "\n", " beatrix_embs = extract_beatrix_embeddings(ds)\n", " flux_latents = extract_flux_latents(ds)\n", "\n", " model = train_beatrix_vae(beatrix_embs, flux_latents, categories)\n", "\n", " del flux_latents; gc.collect()\n", "\n", " results = analyze(model)\n", " return results\n", "\n", "results = run()" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 1000, "referenced_widgets": [ "8109359c35fa411b8af2da9ab5de23d9", "2b2eae2c4e0d4f50bd7fe41c87783623", "f32081acffb34428af5804fad25f324a", 
"7a0183d109d349c9b1478c5ecab34ab5", "acf388cc4b484a5fb3c1992e2cf83671", "693a0407b946478cbc1c01e35812836b", "1368e6e49f0b40da8debb6dc833b68fa", "6b96660a08ee4593b3bbedf15bc343be", "a677eb094f7844508fc40330f49e4d81", "46c274f611614099839ac4739c8b4526", "1833901ffacc4b0db48ac24156666e22", "cb1703b325ef4758b894bc1ee7ea686e", "942b9bde8a22414f8fb07202baaf77e6", "57219c9c8dd04d42939eee9bf04672ba", "abf227988c764efca54ac60f3bf6c2e1", "cb7127277ccc48b99b00dd142a5cc337", "c0d39303e1f7487ea28817e7ac79d74e", "868024569e624fa3985089b615bf63f0", "cc3ca9e781994afdbc211a7090ea8c78", "0216cc2b90074fb3b50b5d6801cc9a6a", "289fb4145aaa4efe89d2485e30d4aa9e", "f38d40c8bcfa4620a1fdde384aba7769", "b26211cdfab94de0b62ab1df0e5db0b9", "d39165b2ae9c4e388f27caa2b2707888", "90542f59a0274d83bb32e2ddd6efd774", "019e21884d9b46a2ba71c4326f397345", "bf904531bf5b4465a466c415709d561c", "1f2cdd860249472a822050ab53fb8804", "ac1011791cac4be7a563ecfc4614b3eb", "4d892ae8ef7a4d00a21c2e60948850f8", "d2a3155f414144588edd9ca78604dfed", "0ba551aeb35049d49b5cfde76b0aa5d0", "51cb99c3c2f84530964e4c870f3c47e4" ] }, "id": "9x-9dZHqSabv", "outputId": "18176bf8-ea92-4e39-c242-36b58a60f1b6" }, "execution_count": 20, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Loading AbstractPhil/synthetic-characters / schnell_full_1_512...\n" ] }, { "output_type": "display_data", "data": { "text/plain": [ "Resolving data files: 0%| | 0/66 [00:00