diff --git a/.gitattributes b/.gitattributes
index a225144b30fa4f75c9a54d92e205e6b05c3312ac..daa73d6068354495423c08967fa1af7745df3019 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -965,3 +965,4 @@ parrot/lib/python3.10/lib-dynload/_asyncio.cpython-310-x86_64-linux-gnu.so filte
parrot/lib/python3.10/lib-dynload/_blake2.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
vllm/lib/python3.10/site-packages/pandas/tests/frame/__pycache__/test_constructors.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
openflamingo/lib/python3.10/site-packages/3204bda914b7f2c6f497__mypyc.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+parrot/lib/python3.10/html/__pycache__/entities.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
diff --git a/parrot/lib/python3.10/html/__pycache__/entities.cpython-310.pyc b/parrot/lib/python3.10/html/__pycache__/entities.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1233e502e6006b37940ee5d283b30d645f4abc18
--- /dev/null
+++ b/parrot/lib/python3.10/html/__pycache__/entities.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f639bcf090e65d074c5c56fa79ad59fe482e2ff1716c63f7358c850eec8067d5
+size 144684
diff --git a/parrot/share/terminfo/k/kterm-co b/parrot/share/terminfo/k/kterm-co
new file mode 100644
index 0000000000000000000000000000000000000000..91f78969c46007d40b8c0496f616787312020d5d
Binary files /dev/null and b/parrot/share/terminfo/k/kterm-co differ
diff --git a/parrot/share/terminfo/s/sb1 b/parrot/share/terminfo/s/sb1
new file mode 100644
index 0000000000000000000000000000000000000000..19f119dadff726c935928b72e9f3e4afea30c674
Binary files /dev/null and b/parrot/share/terminfo/s/sb1 differ
diff --git a/parrot/share/terminfo/s/sb3 b/parrot/share/terminfo/s/sb3
new file mode 100644
index 0000000000000000000000000000000000000000..21b7e178bf1a3ac8e15e5f1a9fe4982fc4794adb
Binary files /dev/null and b/parrot/share/terminfo/s/sb3 differ
diff --git a/parrot/share/terminfo/s/sc415 b/parrot/share/terminfo/s/sc415
new file mode 100644
index 0000000000000000000000000000000000000000..d30d42cae1a7ccbab7b1c641352de344e10da64d
Binary files /dev/null and b/parrot/share/terminfo/s/sc415 differ
diff --git a/parrot/share/terminfo/s/scoansi b/parrot/share/terminfo/s/scoansi
new file mode 100644
index 0000000000000000000000000000000000000000..9ad0cce6ce6275ca22d4d49ddd4c28f13125db50
Binary files /dev/null and b/parrot/share/terminfo/s/scoansi differ
diff --git a/parrot/share/terminfo/s/scoansi-old b/parrot/share/terminfo/s/scoansi-old
new file mode 100644
index 0000000000000000000000000000000000000000..7ae39e9a995499eef38ed9c444837ff5b597a26e
Binary files /dev/null and b/parrot/share/terminfo/s/scoansi-old differ
diff --git a/parrot/share/terminfo/s/screen b/parrot/share/terminfo/s/screen
new file mode 100644
index 0000000000000000000000000000000000000000..97f9e56759e4c938d4a274d9aa9c3487be66ed4d
Binary files /dev/null and b/parrot/share/terminfo/s/screen differ
diff --git a/parrot/share/terminfo/s/screen+fkeys b/parrot/share/terminfo/s/screen+fkeys
new file mode 100644
index 0000000000000000000000000000000000000000..a3564ab154d525b9d3cea7eb70485cf0b10400f9
Binary files /dev/null and b/parrot/share/terminfo/s/screen+fkeys differ
diff --git a/parrot/share/terminfo/s/screen+italics b/parrot/share/terminfo/s/screen+italics
new file mode 100644
index 0000000000000000000000000000000000000000..efbdd64e18596cec43adac058c688bac9079c5ec
Binary files /dev/null and b/parrot/share/terminfo/s/screen+italics differ
diff --git a/parrot/share/terminfo/s/screen-16color b/parrot/share/terminfo/s/screen-16color
new file mode 100644
index 0000000000000000000000000000000000000000..990679d0ba4b5927e59987ed95b1fbb68b21d035
Binary files /dev/null and b/parrot/share/terminfo/s/screen-16color differ
diff --git a/parrot/share/terminfo/s/screen-16color-s b/parrot/share/terminfo/s/screen-16color-s
new file mode 100644
index 0000000000000000000000000000000000000000..6ac7a1fa7d1714dfefb7a1726ca33dc06d8e09b3
Binary files /dev/null and b/parrot/share/terminfo/s/screen-16color-s differ
diff --git a/parrot/share/terminfo/s/screen-256color-bce b/parrot/share/terminfo/s/screen-256color-bce
new file mode 100644
index 0000000000000000000000000000000000000000..972b04840b6ede8bf211fefb0266a86925cbf226
Binary files /dev/null and b/parrot/share/terminfo/s/screen-256color-bce differ
diff --git a/parrot/share/terminfo/s/screen-256color-s b/parrot/share/terminfo/s/screen-256color-s
new file mode 100644
index 0000000000000000000000000000000000000000..61d30ca7b85ed58cb0eec5320a698e377e607452
Binary files /dev/null and b/parrot/share/terminfo/s/screen-256color-s differ
diff --git a/parrot/share/terminfo/s/screen-base b/parrot/share/terminfo/s/screen-base
new file mode 100644
index 0000000000000000000000000000000000000000..ea73ce1d85f0919a17d872ac59ec86b4aa1247f8
Binary files /dev/null and b/parrot/share/terminfo/s/screen-base differ
diff --git a/parrot/share/terminfo/s/screen-bce.gnome b/parrot/share/terminfo/s/screen-bce.gnome
new file mode 100644
index 0000000000000000000000000000000000000000..70ad28cbcf06fca1f5ccb229b5e7e14808fb67ad
Binary files /dev/null and b/parrot/share/terminfo/s/screen-bce.gnome differ
diff --git a/parrot/share/terminfo/s/screen-bce.linux b/parrot/share/terminfo/s/screen-bce.linux
new file mode 100644
index 0000000000000000000000000000000000000000..242cc07d1e5146210e983671bdbbc85c33beb7ae
Binary files /dev/null and b/parrot/share/terminfo/s/screen-bce.linux differ
diff --git a/parrot/share/terminfo/s/screen-bce.mrxvt b/parrot/share/terminfo/s/screen-bce.mrxvt
new file mode 100644
index 0000000000000000000000000000000000000000..80e1fe5e6000222e8f3d5ffff2ce714f2ec6b0cb
Binary files /dev/null and b/parrot/share/terminfo/s/screen-bce.mrxvt differ
diff --git a/parrot/share/terminfo/s/screen-w b/parrot/share/terminfo/s/screen-w
new file mode 100644
index 0000000000000000000000000000000000000000..681e95a524f11fa0b4be09eb40ac07fdde03b483
Binary files /dev/null and b/parrot/share/terminfo/s/screen-w differ
diff --git a/parrot/share/terminfo/s/screen.Eterm b/parrot/share/terminfo/s/screen.Eterm
new file mode 100644
index 0000000000000000000000000000000000000000..8ed159d35a476aeed062d3783b6c7a82790ebfee
Binary files /dev/null and b/parrot/share/terminfo/s/screen.Eterm differ
diff --git a/parrot/share/terminfo/s/screen.linux b/parrot/share/terminfo/s/screen.linux
new file mode 100644
index 0000000000000000000000000000000000000000..2a508d1238ab3b6332b9f8882909f85eb90a006e
Binary files /dev/null and b/parrot/share/terminfo/s/screen.linux differ
diff --git a/parrot/share/terminfo/s/screen.linux-m1 b/parrot/share/terminfo/s/screen.linux-m1
new file mode 100644
index 0000000000000000000000000000000000000000..b0c2157aabde7b54866deffae18722bc870a7a46
Binary files /dev/null and b/parrot/share/terminfo/s/screen.linux-m1 differ
diff --git a/parrot/share/terminfo/s/screen.linux-m2 b/parrot/share/terminfo/s/screen.linux-m2
new file mode 100644
index 0000000000000000000000000000000000000000..1aeb07dcb82efebd34d88c78c9899a7213b19875
Binary files /dev/null and b/parrot/share/terminfo/s/screen.linux-m2 differ
diff --git a/parrot/share/terminfo/s/screen.minitel1-nb b/parrot/share/terminfo/s/screen.minitel1-nb
new file mode 100644
index 0000000000000000000000000000000000000000..b4a3e1fd95c79b0fe994a7a3764d1f8b9a8861ce
Binary files /dev/null and b/parrot/share/terminfo/s/screen.minitel1-nb differ
diff --git a/parrot/share/terminfo/s/screen.minitel1b-80 b/parrot/share/terminfo/s/screen.minitel1b-80
new file mode 100644
index 0000000000000000000000000000000000000000..c4d61d0c07c3325135e21e8e92ce9d18431e8879
Binary files /dev/null and b/parrot/share/terminfo/s/screen.minitel1b-80 differ
diff --git a/parrot/share/terminfo/s/screen.minitel2-80 b/parrot/share/terminfo/s/screen.minitel2-80
new file mode 100644
index 0000000000000000000000000000000000000000..c4d61d0c07c3325135e21e8e92ce9d18431e8879
Binary files /dev/null and b/parrot/share/terminfo/s/screen.minitel2-80 differ
diff --git a/parrot/share/terminfo/s/screen.putty-m1 b/parrot/share/terminfo/s/screen.putty-m1
new file mode 100644
index 0000000000000000000000000000000000000000..67912a1ae808c9062dac56efbdadc648f69fbebb
Binary files /dev/null and b/parrot/share/terminfo/s/screen.putty-m1 differ
diff --git a/parrot/share/terminfo/s/screen.xterm-new b/parrot/share/terminfo/s/screen.xterm-new
new file mode 100644
index 0000000000000000000000000000000000000000..87fff45447a60af125935941de6f320c1fced3a3
Binary files /dev/null and b/parrot/share/terminfo/s/screen.xterm-new differ
diff --git a/parrot/share/terminfo/s/scrhp b/parrot/share/terminfo/s/scrhp
new file mode 100644
index 0000000000000000000000000000000000000000..1698bf5d4e3bab78b5b4d1113c211560f5505bc0
Binary files /dev/null and b/parrot/share/terminfo/s/scrhp differ
diff --git a/parrot/share/terminfo/s/scrt b/parrot/share/terminfo/s/scrt
new file mode 100644
index 0000000000000000000000000000000000000000..228f3fd981507997d43e10db0d048f13f31c5738
Binary files /dev/null and b/parrot/share/terminfo/s/scrt differ
diff --git a/parrot/share/terminfo/s/securecrt b/parrot/share/terminfo/s/securecrt
new file mode 100644
index 0000000000000000000000000000000000000000..228f3fd981507997d43e10db0d048f13f31c5738
Binary files /dev/null and b/parrot/share/terminfo/s/securecrt differ
diff --git a/parrot/share/terminfo/s/st-0.7 b/parrot/share/terminfo/s/st-0.7
new file mode 100644
index 0000000000000000000000000000000000000000..0052581cd1dccbe062dda26bb76fb085cbf31968
Binary files /dev/null and b/parrot/share/terminfo/s/st-0.7 differ
diff --git a/parrot/share/terminfo/s/st-0.8 b/parrot/share/terminfo/s/st-0.8
new file mode 100644
index 0000000000000000000000000000000000000000..a16520e49f9b777c995a62f6b9de825385d8b22d
Binary files /dev/null and b/parrot/share/terminfo/s/st-0.8 differ
diff --git a/parrot/share/terminfo/s/st52-color b/parrot/share/terminfo/s/st52-color
new file mode 100644
index 0000000000000000000000000000000000000000..608e45420287020211bd17a09032e575a6eef743
Binary files /dev/null and b/parrot/share/terminfo/s/st52-color differ
diff --git a/parrot/share/terminfo/s/st52-old b/parrot/share/terminfo/s/st52-old
new file mode 100644
index 0000000000000000000000000000000000000000..bc9acb631eda372133bd6f0b546391beb1f36a3f
Binary files /dev/null and b/parrot/share/terminfo/s/st52-old differ
diff --git a/parrot/share/terminfo/s/stv52 b/parrot/share/terminfo/s/stv52
new file mode 100644
index 0000000000000000000000000000000000000000..384f8dd9735499c53a3ddfd340bff39877b2dc7c
Binary files /dev/null and b/parrot/share/terminfo/s/stv52 differ
diff --git a/parrot/share/terminfo/s/stv52pc b/parrot/share/terminfo/s/stv52pc
new file mode 100644
index 0000000000000000000000000000000000000000..18473147563ef2303b57d0815459f2e6e1101c13
Binary files /dev/null and b/parrot/share/terminfo/s/stv52pc differ
diff --git a/parrot/share/terminfo/s/sun+sl b/parrot/share/terminfo/s/sun+sl
new file mode 100644
index 0000000000000000000000000000000000000000..c5fd523381f36af2e156750085d0b98699ef0dfb
Binary files /dev/null and b/parrot/share/terminfo/s/sun+sl differ
diff --git a/parrot/share/terminfo/s/sun-e b/parrot/share/terminfo/s/sun-e
new file mode 100644
index 0000000000000000000000000000000000000000..33dbc23f20f541523cdcea5db89164872a12cb89
Binary files /dev/null and b/parrot/share/terminfo/s/sun-e differ
diff --git a/parrot/share/terminfo/s/sun-il b/parrot/share/terminfo/s/sun-il
new file mode 100644
index 0000000000000000000000000000000000000000..c431b47b0f34db7fe1992aff161da2d6ae4dc0f8
Binary files /dev/null and b/parrot/share/terminfo/s/sun-il differ
diff --git a/parrot/share/terminfo/s/sun-s b/parrot/share/terminfo/s/sun-s
new file mode 100644
index 0000000000000000000000000000000000000000..5ce9922f2be8ac665d8e2c9f75edd35ee73375e4
Binary files /dev/null and b/parrot/share/terminfo/s/sun-s differ
diff --git a/parrot/share/terminfo/s/sun-s-e b/parrot/share/terminfo/s/sun-s-e
new file mode 100644
index 0000000000000000000000000000000000000000..d2d3902f5af2d596fb29592851bdc2caf7d60876
Binary files /dev/null and b/parrot/share/terminfo/s/sun-s-e differ
diff --git a/parrot/share/terminfo/s/swtp b/parrot/share/terminfo/s/swtp
new file mode 100644
index 0000000000000000000000000000000000000000..3e68f869ea34a85fcf46e8b90d1a40c332889f3f
Binary files /dev/null and b/parrot/share/terminfo/s/swtp differ
diff --git a/parrot/share/terminfo/s/system1 b/parrot/share/terminfo/s/system1
new file mode 100644
index 0000000000000000000000000000000000000000..39ab6a3a9b932b80349a4b30359a1708b38a0aeb
Binary files /dev/null and b/parrot/share/terminfo/s/system1 differ
diff --git a/videollama2/lib/python3.10/site-packages/altair/__init__.py b/videollama2/lib/python3.10/site-packages/altair/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..93dc8ed02c9b0df6d40e47db9ea16acf473165c4
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/altair/__init__.py
@@ -0,0 +1,661 @@
+# ruff: noqa
+__version__ = "5.4.1"
+
+# The content of __all__ is automatically written by
+# tools/update_init_file.py. Do not modify directly.
+__all__ = [
+ "Aggregate",
+ "AggregateOp",
+ "AggregateTransform",
+ "AggregatedFieldDef",
+ "Align",
+ "AllSortString",
+ "AltairDeprecationWarning",
+ "Angle",
+ "AngleDatum",
+ "AngleValue",
+ "AnyMark",
+ "AnyMarkConfig",
+ "AreaConfig",
+ "ArgmaxDef",
+ "ArgminDef",
+ "AutoSizeParams",
+ "AutosizeType",
+ "Axis",
+ "AxisConfig",
+ "AxisOrient",
+ "AxisResolveMap",
+ "BBox",
+ "BarConfig",
+ "BaseTitleNoValueRefs",
+ "Baseline",
+ "Bin",
+ "BinExtent",
+ "BinParams",
+ "BinTransform",
+ "BindCheckbox",
+ "BindDirect",
+ "BindInput",
+ "BindRadioSelect",
+ "BindRange",
+ "Binding",
+ "BinnedTimeUnit",
+ "Blend",
+ "BoxPlot",
+ "BoxPlotConfig",
+ "BoxPlotDef",
+ "BrushConfig",
+ "CalculateTransform",
+ "Categorical",
+ "ChainedWhen",
+ "Chart",
+ "ChartDataType",
+ "Color",
+ "ColorDatum",
+ "ColorDef",
+ "ColorName",
+ "ColorScheme",
+ "ColorValue",
+ "Column",
+ "CompositeMark",
+ "CompositeMarkDef",
+ "CompositionConfig",
+ "ConcatChart",
+ "ConcatSpecGenericSpec",
+ "ConditionalAxisColor",
+ "ConditionalAxisLabelAlign",
+ "ConditionalAxisLabelBaseline",
+ "ConditionalAxisLabelFontStyle",
+ "ConditionalAxisLabelFontWeight",
+ "ConditionalAxisNumber",
+ "ConditionalAxisNumberArray",
+ "ConditionalAxisPropertyAlignnull",
+ "ConditionalAxisPropertyColornull",
+ "ConditionalAxisPropertyFontStylenull",
+ "ConditionalAxisPropertyFontWeightnull",
+ "ConditionalAxisPropertyTextBaselinenull",
+ "ConditionalAxisPropertynumberArraynull",
+ "ConditionalAxisPropertynumbernull",
+ "ConditionalAxisPropertystringnull",
+ "ConditionalAxisString",
+ "ConditionalMarkPropFieldOrDatumDef",
+ "ConditionalMarkPropFieldOrDatumDefTypeForShape",
+ "ConditionalParameterMarkPropFieldOrDatumDef",
+ "ConditionalParameterMarkPropFieldOrDatumDefTypeForShape",
+ "ConditionalParameterStringFieldDef",
+ "ConditionalParameterValueDefGradientstringnullExprRef",
+ "ConditionalParameterValueDefTextExprRef",
+ "ConditionalParameterValueDefnumber",
+ "ConditionalParameterValueDefnumberArrayExprRef",
+ "ConditionalParameterValueDefnumberExprRef",
+ "ConditionalParameterValueDefstringExprRef",
+ "ConditionalParameterValueDefstringnullExprRef",
+ "ConditionalPredicateMarkPropFieldOrDatumDef",
+ "ConditionalPredicateMarkPropFieldOrDatumDefTypeForShape",
+ "ConditionalPredicateStringFieldDef",
+ "ConditionalPredicateValueDefAlignnullExprRef",
+ "ConditionalPredicateValueDefColornullExprRef",
+ "ConditionalPredicateValueDefFontStylenullExprRef",
+ "ConditionalPredicateValueDefFontWeightnullExprRef",
+ "ConditionalPredicateValueDefGradientstringnullExprRef",
+ "ConditionalPredicateValueDefTextBaselinenullExprRef",
+ "ConditionalPredicateValueDefTextExprRef",
+ "ConditionalPredicateValueDefnumber",
+ "ConditionalPredicateValueDefnumberArrayExprRef",
+ "ConditionalPredicateValueDefnumberArraynullExprRef",
+ "ConditionalPredicateValueDefnumberExprRef",
+ "ConditionalPredicateValueDefnumbernullExprRef",
+ "ConditionalPredicateValueDefstringExprRef",
+ "ConditionalPredicateValueDefstringnullExprRef",
+ "ConditionalStringFieldDef",
+ "ConditionalValueDefGradientstringnullExprRef",
+ "ConditionalValueDefTextExprRef",
+ "ConditionalValueDefnumber",
+ "ConditionalValueDefnumberArrayExprRef",
+ "ConditionalValueDefnumberExprRef",
+ "ConditionalValueDefstringExprRef",
+ "ConditionalValueDefstringnullExprRef",
+ "Config",
+ "CsvDataFormat",
+ "Cursor",
+ "Cyclical",
+ "Data",
+ "DataFormat",
+ "DataSource",
+ "DataType",
+ "Datasets",
+ "DateTime",
+ "DatumChannelMixin",
+ "DatumDef",
+ "Day",
+ "DensityTransform",
+ "DerivedStream",
+ "Description",
+ "DescriptionValue",
+ "Detail",
+ "Dict",
+ "DictInlineDataset",
+ "DictSelectionInit",
+ "DictSelectionInitInterval",
+ "Diverging",
+ "DomainUnionWith",
+ "DsvDataFormat",
+ "Element",
+ "Encoding",
+ "EncodingSortField",
+ "ErrorBand",
+ "ErrorBandConfig",
+ "ErrorBandDef",
+ "ErrorBar",
+ "ErrorBarConfig",
+ "ErrorBarDef",
+ "ErrorBarExtent",
+ "EventStream",
+ "EventType",
+ "Expr",
+ "ExprRef",
+ "ExtentTransform",
+ "Facet",
+ "FacetChart",
+ "FacetEncodingFieldDef",
+ "FacetFieldDef",
+ "FacetMapping",
+ "FacetSpec",
+ "FacetedEncoding",
+ "FacetedUnitSpec",
+ "Feature",
+ "FeatureCollection",
+ "FeatureGeometryGeoJsonProperties",
+ "Field",
+ "FieldChannelMixin",
+ "FieldDefWithoutScale",
+ "FieldEqualPredicate",
+ "FieldGTEPredicate",
+ "FieldGTPredicate",
+ "FieldLTEPredicate",
+ "FieldLTPredicate",
+ "FieldName",
+ "FieldOneOfPredicate",
+ "FieldOrDatumDefWithConditionDatumDefGradientstringnull",
+ "FieldOrDatumDefWithConditionDatumDefnumber",
+ "FieldOrDatumDefWithConditionDatumDefnumberArray",
+ "FieldOrDatumDefWithConditionDatumDefstringnull",
+ "FieldOrDatumDefWithConditionMarkPropFieldDefGradientstringnull",
+ "FieldOrDatumDefWithConditionMarkPropFieldDefTypeForShapestringnull",
+ "FieldOrDatumDefWithConditionMarkPropFieldDefnumber",
+ "FieldOrDatumDefWithConditionMarkPropFieldDefnumberArray",
+ "FieldOrDatumDefWithConditionStringDatumDefText",
+ "FieldOrDatumDefWithConditionStringFieldDefText",
+ "FieldOrDatumDefWithConditionStringFieldDefstring",
+ "FieldRange",
+ "FieldRangePredicate",
+ "FieldValidPredicate",
+ "Fill",
+ "FillDatum",
+ "FillOpacity",
+ "FillOpacityDatum",
+ "FillOpacityValue",
+ "FillValue",
+ "FilterTransform",
+ "Fit",
+ "FlattenTransform",
+ "FoldTransform",
+ "FontStyle",
+ "FontWeight",
+ "FormatConfig",
+ "Generator",
+ "GenericUnitSpecEncodingAnyMark",
+ "GeoJsonFeature",
+ "GeoJsonFeatureCollection",
+ "GeoJsonProperties",
+ "Geometry",
+ "GeometryCollection",
+ "Gradient",
+ "GradientStop",
+ "GraticuleGenerator",
+ "GraticuleParams",
+ "HConcatChart",
+ "HConcatSpecGenericSpec",
+ "Header",
+ "HeaderConfig",
+ "HexColor",
+ "Href",
+ "HrefValue",
+ "Impute",
+ "ImputeMethod",
+ "ImputeParams",
+ "ImputeSequence",
+ "ImputeTransform",
+ "InlineData",
+ "InlineDataset",
+ "Interpolate",
+ "IntervalSelectionConfig",
+ "IntervalSelectionConfigWithoutType",
+ "JoinAggregateFieldDef",
+ "JoinAggregateTransform",
+ "JsonDataFormat",
+ "JupyterChart",
+ "Key",
+ "LabelOverlap",
+ "LatLongDef",
+ "LatLongFieldDef",
+ "Latitude",
+ "Latitude2",
+ "Latitude2Datum",
+ "Latitude2Value",
+ "LatitudeDatum",
+ "LayerChart",
+ "LayerRepeatMapping",
+ "LayerRepeatSpec",
+ "LayerSpec",
+ "LayoutAlign",
+ "Legend",
+ "LegendBinding",
+ "LegendConfig",
+ "LegendOrient",
+ "LegendResolveMap",
+ "LegendStreamBinding",
+ "LineConfig",
+ "LineString",
+ "LinearGradient",
+ "LocalMultiTimeUnit",
+ "LocalSingleTimeUnit",
+ "Locale",
+ "LoessTransform",
+ "LogicalAndPredicate",
+ "LogicalNotPredicate",
+ "LogicalOrPredicate",
+ "Longitude",
+ "Longitude2",
+ "Longitude2Datum",
+ "Longitude2Value",
+ "LongitudeDatum",
+ "LookupData",
+ "LookupSelection",
+ "LookupTransform",
+ "Mark",
+ "MarkConfig",
+ "MarkDef",
+ "MarkInvalidDataMode",
+ "MarkPropDefGradientstringnull",
+ "MarkPropDefnumber",
+ "MarkPropDefnumberArray",
+ "MarkPropDefstringnullTypeForShape",
+ "MarkType",
+ "MaxRowsError",
+ "MergedStream",
+ "Month",
+ "MultiLineString",
+ "MultiPoint",
+ "MultiPolygon",
+ "MultiTimeUnit",
+ "NamedData",
+ "NonArgAggregateOp",
+ "NonLayerRepeatSpec",
+ "NonNormalizedSpec",
+ "NumberLocale",
+ "NumericArrayMarkPropDef",
+ "NumericMarkPropDef",
+ "OffsetDef",
+ "Opacity",
+ "OpacityDatum",
+ "OpacityValue",
+ "Order",
+ "OrderFieldDef",
+ "OrderOnlyDef",
+ "OrderValue",
+ "OrderValueDef",
+ "Orient",
+ "Orientation",
+ "OverlayMarkDef",
+ "Padding",
+ "Parameter",
+ "ParameterExpression",
+ "ParameterExtent",
+ "ParameterName",
+ "ParameterPredicate",
+ "Parse",
+ "ParseValue",
+ "PivotTransform",
+ "Point",
+ "PointSelectionConfig",
+ "PointSelectionConfigWithoutType",
+ "PolarDef",
+ "Polygon",
+ "Position",
+ "Position2Def",
+ "PositionDatumDef",
+ "PositionDatumDefBase",
+ "PositionDef",
+ "PositionFieldDef",
+ "PositionFieldDefBase",
+ "PositionValueDef",
+ "Predicate",
+ "PredicateComposition",
+ "PrimitiveValue",
+ "Projection",
+ "ProjectionConfig",
+ "ProjectionType",
+ "QuantileTransform",
+ "RadialGradient",
+ "Radius",
+ "Radius2",
+ "Radius2Datum",
+ "Radius2Value",
+ "RadiusDatum",
+ "RadiusValue",
+ "RangeConfig",
+ "RangeEnum",
+ "RangeRaw",
+ "RangeRawArray",
+ "RangeScheme",
+ "RectConfig",
+ "RegressionTransform",
+ "RelativeBandSize",
+ "RepeatChart",
+ "RepeatMapping",
+ "RepeatRef",
+ "RepeatSpec",
+ "Resolve",
+ "ResolveMode",
+ "Root",
+ "Row",
+ "RowColLayoutAlign",
+ "RowColboolean",
+ "RowColnumber",
+ "RowColumnEncodingFieldDef",
+ "SCHEMA_URL",
+ "SCHEMA_VERSION",
+ "SampleTransform",
+ "Scale",
+ "ScaleBinParams",
+ "ScaleBins",
+ "ScaleConfig",
+ "ScaleDatumDef",
+ "ScaleFieldDef",
+ "ScaleInterpolateEnum",
+ "ScaleInterpolateParams",
+ "ScaleInvalidDataConfig",
+ "ScaleInvalidDataShowAsValueangle",
+ "ScaleInvalidDataShowAsValuecolor",
+ "ScaleInvalidDataShowAsValuefill",
+ "ScaleInvalidDataShowAsValuefillOpacity",
+ "ScaleInvalidDataShowAsValueopacity",
+ "ScaleInvalidDataShowAsValueradius",
+ "ScaleInvalidDataShowAsValueshape",
+ "ScaleInvalidDataShowAsValuesize",
+ "ScaleInvalidDataShowAsValuestroke",
+ "ScaleInvalidDataShowAsValuestrokeDash",
+ "ScaleInvalidDataShowAsValuestrokeOpacity",
+ "ScaleInvalidDataShowAsValuestrokeWidth",
+ "ScaleInvalidDataShowAsValuetheta",
+ "ScaleInvalidDataShowAsValuex",
+ "ScaleInvalidDataShowAsValuexOffset",
+ "ScaleInvalidDataShowAsValuey",
+ "ScaleInvalidDataShowAsValueyOffset",
+ "ScaleInvalidDataShowAsangle",
+ "ScaleInvalidDataShowAscolor",
+ "ScaleInvalidDataShowAsfill",
+ "ScaleInvalidDataShowAsfillOpacity",
+ "ScaleInvalidDataShowAsopacity",
+ "ScaleInvalidDataShowAsradius",
+ "ScaleInvalidDataShowAsshape",
+ "ScaleInvalidDataShowAssize",
+ "ScaleInvalidDataShowAsstroke",
+ "ScaleInvalidDataShowAsstrokeDash",
+ "ScaleInvalidDataShowAsstrokeOpacity",
+ "ScaleInvalidDataShowAsstrokeWidth",
+ "ScaleInvalidDataShowAstheta",
+ "ScaleInvalidDataShowAsx",
+ "ScaleInvalidDataShowAsxOffset",
+ "ScaleInvalidDataShowAsy",
+ "ScaleInvalidDataShowAsyOffset",
+ "ScaleResolveMap",
+ "ScaleType",
+ "SchemaBase",
+ "SchemeParams",
+ "SecondaryFieldDef",
+ "SelectionConfig",
+ "SelectionExpression",
+ "SelectionInit",
+ "SelectionInitInterval",
+ "SelectionInitIntervalMapping",
+ "SelectionInitMapping",
+ "SelectionParameter",
+ "SelectionPredicateComposition",
+ "SelectionResolution",
+ "SelectionType",
+ "SequenceGenerator",
+ "SequenceParams",
+ "SequentialMultiHue",
+ "SequentialSingleHue",
+ "Shape",
+ "ShapeDatum",
+ "ShapeDef",
+ "ShapeValue",
+ "SharedEncoding",
+ "SingleDefUnitChannel",
+ "SingleTimeUnit",
+ "Size",
+ "SizeDatum",
+ "SizeValue",
+ "Sort",
+ "SortArray",
+ "SortByChannel",
+ "SortByChannelDesc",
+ "SortByEncoding",
+ "SortField",
+ "SortOrder",
+ "Spec",
+ "SphereGenerator",
+ "StackOffset",
+ "StackTransform",
+ "StandardType",
+ "Step",
+ "StepFor",
+ "Stream",
+ "StringFieldDef",
+ "StringFieldDefWithCondition",
+ "StringValueDefWithCondition",
+ "Stroke",
+ "StrokeCap",
+ "StrokeDash",
+ "StrokeDashDatum",
+ "StrokeDashValue",
+ "StrokeDatum",
+ "StrokeJoin",
+ "StrokeOpacity",
+ "StrokeOpacityDatum",
+ "StrokeOpacityValue",
+ "StrokeValue",
+ "StrokeWidth",
+ "StrokeWidthDatum",
+ "StrokeWidthValue",
+ "StyleConfigIndex",
+ "SymbolShape",
+ "TOPLEVEL_ONLY_KEYS",
+ "Text",
+ "TextBaseline",
+ "TextDatum",
+ "TextDef",
+ "TextDirection",
+ "TextValue",
+ "Then",
+ "Theta",
+ "Theta2",
+ "Theta2Datum",
+ "Theta2Value",
+ "ThetaDatum",
+ "ThetaValue",
+ "TickConfig",
+ "TickCount",
+ "TimeInterval",
+ "TimeIntervalStep",
+ "TimeLocale",
+ "TimeUnit",
+ "TimeUnitParams",
+ "TimeUnitTransform",
+ "TimeUnitTransformParams",
+ "Title",
+ "TitleAnchor",
+ "TitleConfig",
+ "TitleFrame",
+ "TitleOrient",
+ "TitleParams",
+ "Tooltip",
+ "TooltipContent",
+ "TooltipValue",
+ "TopLevelConcatSpec",
+ "TopLevelFacetSpec",
+ "TopLevelHConcatSpec",
+ "TopLevelLayerSpec",
+ "TopLevelMixin",
+ "TopLevelParameter",
+ "TopLevelRepeatSpec",
+ "TopLevelSelectionParameter",
+ "TopLevelSpec",
+ "TopLevelUnitSpec",
+ "TopLevelVConcatSpec",
+ "TopoDataFormat",
+ "Transform",
+ "Type",
+ "TypeForShape",
+ "TypedFieldDef",
+ "URI",
+ "Undefined",
+ "UnitSpec",
+ "UnitSpecWithFrame",
+ "Url",
+ "UrlData",
+ "UrlValue",
+ "UtcMultiTimeUnit",
+ "UtcSingleTimeUnit",
+ "VConcatChart",
+ "VConcatSpecGenericSpec",
+ "VEGAEMBED_VERSION",
+ "VEGALITE_VERSION",
+ "VEGA_VERSION",
+ "ValueChannelMixin",
+ "ValueDefWithConditionMarkPropFieldOrDatumDefGradientstringnull",
+ "ValueDefWithConditionMarkPropFieldOrDatumDefTypeForShapestringnull",
+ "ValueDefWithConditionMarkPropFieldOrDatumDefnumber",
+ "ValueDefWithConditionMarkPropFieldOrDatumDefnumberArray",
+ "ValueDefWithConditionMarkPropFieldOrDatumDefstringnull",
+ "ValueDefWithConditionStringFieldDefText",
+ "ValueDefnumber",
+ "ValueDefnumberwidthheightExprRef",
+ "VariableParameter",
+ "Vector10string",
+ "Vector12string",
+ "Vector2DateTime",
+ "Vector2Vector2number",
+ "Vector2boolean",
+ "Vector2number",
+ "Vector2string",
+ "Vector3number",
+ "Vector7string",
+ "VegaLite",
+ "VegaLiteSchema",
+ "ViewBackground",
+ "ViewConfig",
+ "When",
+ "WindowEventType",
+ "WindowFieldDef",
+ "WindowOnlyOp",
+ "WindowTransform",
+ "X",
+ "X2",
+ "X2Datum",
+ "X2Value",
+ "XDatum",
+ "XError",
+ "XError2",
+ "XError2Value",
+ "XErrorValue",
+ "XOffset",
+ "XOffsetDatum",
+ "XOffsetValue",
+ "XValue",
+ "Y",
+ "Y2",
+ "Y2Datum",
+ "Y2Value",
+ "YDatum",
+ "YError",
+ "YError2",
+ "YError2Value",
+ "YErrorValue",
+ "YOffset",
+ "YOffsetDatum",
+ "YOffsetValue",
+ "YValue",
+ "api",
+ "binding",
+ "binding_checkbox",
+ "binding_radio",
+ "binding_range",
+ "binding_select",
+ "channels",
+ "check_fields_and_encodings",
+ "compiler",
+ "concat",
+ "condition",
+ "core",
+ "data",
+ "data_transformers",
+ "datum",
+ "default_data_transformer",
+ "display",
+ "expr",
+ "graticule",
+ "hconcat",
+ "jupyter",
+ "layer",
+ "limit_rows",
+ "load_ipython_extension",
+ "load_schema",
+ "mixins",
+ "param",
+ "parse_shorthand",
+ "renderers",
+ "repeat",
+ "sample",
+ "schema",
+ "selection_interval",
+ "selection_point",
+ "sequence",
+ "sphere",
+ "theme",
+ "themes",
+ "to_csv",
+ "to_json",
+ "to_values",
+ "topo_feature",
+ "typing",
+ "utils",
+ "v5",
+ "value",
+ "vconcat",
+ "vegalite",
+ "vegalite_compilers",
+ "when",
+ "with_property_setters",
+]
+
+
+def __dir__():
+ return __all__
+
+
+from altair.vegalite import *
+from altair.vegalite.v5.schema.core import Dict
+from altair.jupyter import JupyterChart
+from altair.expr import expr
+from altair.utils import AltairDeprecationWarning, parse_shorthand, Undefined
+from altair import typing
+
+
+def load_ipython_extension(ipython):
+ from altair._magics import vegalite
+
+ ipython.register_magic_function(vegalite, "cell")
diff --git a/videollama2/lib/python3.10/site-packages/altair/_magics.py b/videollama2/lib/python3.10/site-packages/altair/_magics.py
new file mode 100644
index 0000000000000000000000000000000000000000..05186367ee1aa477e8a38f99259f10d590a518ad
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/altair/_magics.py
@@ -0,0 +1,110 @@
+"""Magic functions for rendering vega-lite specifications."""
+
+__all__ = ["vegalite"]
+
+import json
+import warnings
+
+import IPython
+from IPython.core import magic_arguments
+from narwhals.dependencies import is_pandas_dataframe as _is_pandas_dataframe
+
+from altair.vegalite import v5 as vegalite_v5
+
+try:
+ import yaml
+
+ YAML_AVAILABLE = True
+except ImportError:
+ YAML_AVAILABLE = False
+
+
+RENDERERS = {
+ "vega-lite": {
+ "5": vegalite_v5.VegaLite,
+ },
+}
+
+
+TRANSFORMERS = {
+ "vega-lite": {
+ "5": vegalite_v5.data_transformers,
+ },
+}
+
+
+def _prepare_data(data, data_transformers):
+ """Convert input data to data for use within schema."""
+ if data is None or isinstance(data, dict):
+ return data
+ elif _is_pandas_dataframe(data):
+ if func := data_transformers.get():
+ data = func(data)
+ return data
+ elif isinstance(data, str):
+ return {"url": data}
+ else:
+ warnings.warn(f"data of type {type(data)} not recognized", stacklevel=1)
+ return data
+
+
+def _get_variable(name):
+ """Get a variable from the notebook namespace."""
+ ip = IPython.get_ipython()
+ if ip is None:
+ msg = (
+ "Magic command must be run within an IPython "
+ "environment, in which get_ipython() is defined."
+ )
+ raise ValueError(msg)
+ if name not in ip.user_ns:
+ msg = f"argument '{name}' does not match the name of any defined variable"
+ raise NameError(msg)
+ return ip.user_ns[name]
+
+
+@magic_arguments.magic_arguments()
+@magic_arguments.argument(
+ "data",
+ nargs="?",
+ help="local variablename of a pandas DataFrame to be used as the dataset",
+)
+@magic_arguments.argument("-v", "--version", dest="version", default="v5")
+@magic_arguments.argument("-j", "--json", dest="json", action="store_true")
+def vegalite(line, cell):
+ """
+ Cell magic for displaying vega-lite visualizations in CoLab.
+
+ %%vegalite [dataframe] [--json] [--version='v5']
+
+ Visualize the contents of the cell using Vega-Lite, optionally
+ specifying a pandas DataFrame object to be used as the dataset.
+
+ if --json is passed, then input is parsed as json rather than yaml.
+ """
+ args = magic_arguments.parse_argstring(vegalite, line)
+ existing_versions = {"v5": "5"}
+ version = existing_versions[args.version]
+ assert version in RENDERERS["vega-lite"]
+ VegaLite = RENDERERS["vega-lite"][version]
+ data_transformers = TRANSFORMERS["vega-lite"][version]
+
+ if args.json:
+ spec = json.loads(cell)
+ elif not YAML_AVAILABLE:
+ try:
+ spec = json.loads(cell)
+ except json.JSONDecodeError as err:
+ msg = (
+ "%%vegalite: spec is not valid JSON. "
+ "Install pyyaml to parse spec as yaml"
+ )
+ raise ValueError(msg) from err
+ else:
+ spec = yaml.load(cell, Loader=yaml.SafeLoader)
+
+ if args.data is not None:
+ data = _get_variable(args.data)
+ spec["data"] = _prepare_data(data, data_transformers)
+
+ return VegaLite(spec)
diff --git a/videollama2/lib/python3.10/site-packages/altair/jupyter/__init__.py b/videollama2/lib/python3.10/site-packages/altair/jupyter/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..c57815a2f1325efaf1a02970fe287e646cdac9c3
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/altair/jupyter/__init__.py
@@ -0,0 +1,21 @@
+try:
+ import anywidget # noqa: F401
+except ImportError:
+ # When anywidget isn't available, create stand-in JupyterChart class
+ # that raises an informative import error on construction. This
+ # way we can make JupyterChart available in the altair namespace
+ # when anywidget is not installed
+ class JupyterChart:
+ def __init__(self, *args, **kwargs):
+ msg = (
+ "The Altair JupyterChart requires the anywidget \n"
+ "Python package which may be installed using pip with\n"
+ " pip install anywidget\n"
+ "or using conda with\n"
+ " conda install -c conda-forge anywidget\n"
+ "Afterwards, you will need to restart your Python kernel."
+ )
+ raise ImportError(msg)
+
+else:
+ from .jupyter_chart import JupyterChart # noqa: F401
diff --git a/videollama2/lib/python3.10/site-packages/altair/jupyter/__pycache__/__init__.cpython-310.pyc b/videollama2/lib/python3.10/site-packages/altair/jupyter/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..123b001245b4b459502e7bfacf94a85b0a11e180
Binary files /dev/null and b/videollama2/lib/python3.10/site-packages/altair/jupyter/__pycache__/__init__.cpython-310.pyc differ
diff --git a/videollama2/lib/python3.10/site-packages/altair/jupyter/__pycache__/jupyter_chart.cpython-310.pyc b/videollama2/lib/python3.10/site-packages/altair/jupyter/__pycache__/jupyter_chart.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cf3490a832818a4fed25e5fb5706552ae8e5ca08
Binary files /dev/null and b/videollama2/lib/python3.10/site-packages/altair/jupyter/__pycache__/jupyter_chart.cpython-310.pyc differ
diff --git a/videollama2/lib/python3.10/site-packages/altair/jupyter/js/README.md b/videollama2/lib/python3.10/site-packages/altair/jupyter/js/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f1ec545894f60fea2a2096b4ac4b588c890b5192
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/altair/jupyter/js/README.md
@@ -0,0 +1,2 @@
+# JupyterChart
+This directory contains the JavaScript portion of the Altair `JupyterChart`. The `JupyterChart` is based on the [AnyWidget](https://anywidget.dev/) project.
diff --git a/videollama2/lib/python3.10/site-packages/altair/jupyter/js/index.js b/videollama2/lib/python3.10/site-packages/altair/jupyter/js/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..58b936091d00fb6a572d977b33a64e15dc32d47e
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/altair/jupyter/js/index.js
@@ -0,0 +1,230 @@
+import vegaEmbed from "https://esm.sh/vega-embed@6?deps=vega@5&deps=vega-lite@5.20.1";
+import lodashDebounce from "https://esm.sh/lodash-es@4.17.21/debounce";
+
+// Note: For offline support, the import lines above are removed and the remaining script
+// is bundled using vl-convert's javascript_bundle function. See the documentation of
+// the javascript_bundle function for details on the available imports and their names.
+// If an additional import is required in the future, it will need to be added to vl-convert
+// in order to preserve offline support.
+async function render({ model, el }) {
+ let finalize;
+
+ function showError(error){
+ el.innerHTML = (
+ '
'
+ + '
JavaScript Error: ' + error.message + '
'
+ + "
This usually means there's a typo in your chart specification. "
+ + "See the javascript console for the full traceback.
"
+ + '
'
+ );
+ }
+
+ const reembed = async () => {
+ if (finalize != null) {
+ finalize();
+ }
+
+ model.set("local_tz", Intl.DateTimeFormat().resolvedOptions().timeZone);
+
+ let spec = structuredClone(model.get("spec"));
+ if (spec == null) {
+ // Remove any existing chart and return
+ while (el.firstChild) {
+ el.removeChild(el.lastChild);
+ }
+ model.save_changes();
+ return;
+ }
+ let embedOptions = structuredClone(model.get("embed_options")) ?? undefined;
+
+ let api;
+ try {
+ api = await vegaEmbed(el, spec, embedOptions);
+ } catch (error) {
+ showError(error)
+ return;
+ }
+
+ finalize = api.finalize;
+
+ // Debounce config
+ const wait = model.get("debounce_wait") ?? 10;
+ const debounceOpts = {leading: false, trailing: true};
+ if (model.get("max_wait") ?? true) {
+ debounceOpts["maxWait"] = wait;
+ }
+
+ const initialSelections = {};
+ for (const selectionName of Object.keys(model.get("_vl_selections"))) {
+ const storeName = `${selectionName}_store`;
+ const selectionHandler = (_, value) => {
+ const newSelections = cleanJson(model.get("_vl_selections") ?? {});
+ const store = cleanJson(api.view.data(storeName) ?? []);
+
+ newSelections[selectionName] = {value, store};
+ model.set("_vl_selections", newSelections);
+ model.save_changes();
+ };
+ api.view.addSignalListener(selectionName, lodashDebounce(selectionHandler, wait, debounceOpts));
+
+ initialSelections[selectionName] = {
+ value: cleanJson(api.view.signal(selectionName) ?? {}),
+ store: cleanJson(api.view.data(storeName) ?? [])
+ }
+ }
+ model.set("_vl_selections", initialSelections);
+
+ const initialParams = {};
+ for (const paramName of Object.keys(model.get("_params"))) {
+ const paramHandler = (_, value) => {
+ const newParams = JSON.parse(JSON.stringify(model.get("_params"))) || {};
+ newParams[paramName] = value;
+ model.set("_params", newParams);
+ model.save_changes();
+ };
+ api.view.addSignalListener(paramName, lodashDebounce(paramHandler, wait, debounceOpts));
+
+ initialParams[paramName] = api.view.signal(paramName) ?? null
+ }
+ model.set("_params", initialParams);
+ model.save_changes();
+
+ // Param change callback
+ model.on('change:_params', async (new_params) => {
+ for (const [param, value] of Object.entries(new_params.changed._params)) {
+ api.view.signal(param, value);
+ }
+ await api.view.runAsync();
+ });
+
+ // Add signal/data listeners
+ for (const watch of model.get("_js_watch_plan") ?? []) {
+ if (watch.namespace === "data") {
+ const dataHandler = (_, value) => {
+ model.set("_js_to_py_updates", [{
+ namespace: "data",
+ name: watch.name,
+ scope: watch.scope,
+ value: cleanJson(value)
+ }]);
+ model.save_changes();
+ };
+ addDataListener(api.view, watch.name, watch.scope, lodashDebounce(dataHandler, wait, debounceOpts))
+
+ } else if (watch.namespace === "signal") {
+ const signalHandler = (_, value) => {
+ model.set("_js_to_py_updates", [{
+ namespace: "signal",
+ name: watch.name,
+ scope: watch.scope,
+ value: cleanJson(value)
+ }]);
+ model.save_changes();
+ };
+
+ addSignalListener(api.view, watch.name, watch.scope, lodashDebounce(signalHandler, wait, debounceOpts))
+ }
+ }
+
+ // Add signal/data updaters
+ model.on('change:_py_to_js_updates', async (updates) => {
+ for (const update of updates.changed._py_to_js_updates ?? []) {
+ if (update.namespace === "signal") {
+ setSignalValue(api.view, update.name, update.scope, update.value);
+ } else if (update.namespace === "data") {
+ setDataValue(api.view, update.name, update.scope, update.value);
+ }
+ }
+ await api.view.runAsync();
+ });
+ }
+
+ model.on('change:spec', reembed);
+ model.on('change:embed_options', reembed);
+ model.on('change:debounce_wait', reembed);
+ model.on('change:max_wait', reembed);
+ await reembed();
+}
+
+function cleanJson(data) {
+ return JSON.parse(JSON.stringify(data))
+}
+
+function getNestedRuntime(view, scope) {
+ var runtime = view._runtime;
+ for (const index of scope) {
+ runtime = runtime.subcontext[index];
+ }
+ return runtime
+}
+
+function lookupSignalOp(view, name, scope) {
+ let parent_runtime = getNestedRuntime(view, scope);
+ return parent_runtime.signals[name] ?? null;
+}
+
+function dataRef(view, name, scope) {
+ let parent_runtime = getNestedRuntime(view, scope);
+ return parent_runtime.data[name];
+}
+
+export function setSignalValue(view, name, scope, value) {
+ let signal_op = lookupSignalOp(view, name, scope);
+ view.update(signal_op, value);
+}
+
+export function setDataValue(view, name, scope, value) {
+ let dataset = dataRef(view, name, scope);
+ let changeset = view.changeset().remove(() => true).insert(value)
+ dataset.modified = true;
+ view.pulse(dataset.input, changeset);
+}
+
+export function addSignalListener(view, name, scope, handler) {
+ let signal_op = lookupSignalOp(view, name, scope);
+ return addOperatorListener(
+ view,
+ name,
+ signal_op,
+ handler,
+ );
+}
+
+export function addDataListener(view, name, scope, handler) {
+ let dataset = dataRef(view, name, scope).values;
+ return addOperatorListener(
+ view,
+ name,
+ dataset,
+ handler,
+ );
+}
+
+// Private helpers from Vega for dealing with nested signals/data
+function findOperatorHandler(op, handler) {
+ const h = (op._targets || [])
+ .filter(op => op._update && op._update.handler === handler);
+ return h.length ? h[0] : null;
+}
+
+function addOperatorListener(view, name, op, handler) {
+ let h = findOperatorHandler(op, handler);
+ if (!h) {
+ h = trap(view, () => handler(name, op.value));
+ h.handler = handler;
+ view.on(op, null, h);
+ }
+ return view;
+}
+
+function trap(view, fn) {
+ return !fn ? null : function() {
+ try {
+ fn.apply(this, arguments);
+ } catch (error) {
+ view.error(error);
+ }
+ };
+}
+
+export default { render }
diff --git a/videollama2/lib/python3.10/site-packages/altair/jupyter/jupyter_chart.py b/videollama2/lib/python3.10/site-packages/altair/jupyter/jupyter_chart.py
new file mode 100644
index 0000000000000000000000000000000000000000..675cce15fbb3510889ca44ec7eada299eea9d554
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/altair/jupyter/jupyter_chart.py
@@ -0,0 +1,404 @@
+from __future__ import annotations
+
+import json
+import pathlib
+from typing import Any
+
+import anywidget
+import traitlets
+
+import altair as alt
+from altair import TopLevelSpec
+from altair.utils._vegafusion_data import (
+ compile_to_vegafusion_chart_state,
+ using_vegafusion,
+)
+from altair.utils.selection import IndexSelection, IntervalSelection, PointSelection
+
+_here = pathlib.Path(__file__).parent
+
+
+class Params(traitlets.HasTraits):
+ """Traitlet class storing a JupyterChart's params."""
+
+ def __init__(self, trait_values):
+ super().__init__()
+
+ for key, value in trait_values.items():
+ if isinstance(value, (int, float)):
+ traitlet_type = traitlets.Float()
+ elif isinstance(value, str):
+ traitlet_type = traitlets.Unicode()
+ elif isinstance(value, list):
+ traitlet_type = traitlets.List()
+ elif isinstance(value, dict):
+ traitlet_type = traitlets.Dict()
+ else:
+ traitlet_type = traitlets.Any()
+
+ # Add the new trait.
+ self.add_traits(**{key: traitlet_type})
+
+ # Set the trait's value.
+ setattr(self, key, value)
+
+ def __repr__(self):
+ return f"Params({self.trait_values()})"
+
+
+class Selections(traitlets.HasTraits):
+ """Traitlet class storing a JupyterChart's selections."""
+
+ def __init__(self, trait_values):
+ super().__init__()
+
+ for key, value in trait_values.items():
+ if isinstance(value, IndexSelection):
+ traitlet_type = traitlets.Instance(IndexSelection)
+ elif isinstance(value, PointSelection):
+ traitlet_type = traitlets.Instance(PointSelection)
+ elif isinstance(value, IntervalSelection):
+ traitlet_type = traitlets.Instance(IntervalSelection)
+ else:
+ msg = f"Unexpected selection type: {type(value)}"
+ raise ValueError(msg)
+
+ # Add the new trait.
+ self.add_traits(**{key: traitlet_type})
+
+ # Set the trait's value.
+ setattr(self, key, value)
+
+ # Make read-only
+ self.observe(self._make_read_only, names=key)
+
+ def __repr__(self):
+ return f"Selections({self.trait_values()})"
+
+ def _make_read_only(self, change):
+ """Work around to make traits read-only, but still allow us to change them internally."""
+ if change["name"] in self.traits() and change["old"] != change["new"]:
+ self._set_value(change["name"], change["old"])
+ msg = (
+ "Selections may not be set from Python.\n"
+ f"Attempted to set select: {change['name']}"
+ )
+ raise ValueError(msg)
+
+ def _set_value(self, key, value):
+ self.unobserve(self._make_read_only, names=key)
+ setattr(self, key, value)
+ self.observe(self._make_read_only, names=key)
+
+
+def load_js_src() -> str:
+ return (_here / "js" / "index.js").read_text()
+
+
+class JupyterChart(anywidget.AnyWidget):
+ _esm = load_js_src()
+ _css = r"""
+ .vega-embed {
+ /* Make sure action menu isn't cut off */
+ overflow: visible;
+ }
+ """
+
+ # Public traitlets
+ chart = traitlets.Instance(TopLevelSpec, allow_none=True)
+ spec = traitlets.Dict(allow_none=True).tag(sync=True)
+ debounce_wait = traitlets.Float(default_value=10).tag(sync=True)
+ max_wait = traitlets.Bool(default_value=True).tag(sync=True)
+ local_tz = traitlets.Unicode(default_value=None, allow_none=True).tag(sync=True)
+ debug = traitlets.Bool(default_value=False)
+ embed_options = traitlets.Dict(default_value=None, allow_none=True).tag(sync=True)
+
+ # Internal selection traitlets
+ _selection_types = traitlets.Dict()
+ _vl_selections = traitlets.Dict().tag(sync=True)
+
+ # Internal param traitlets
+ _params = traitlets.Dict().tag(sync=True)
+
+ # Internal comm traitlets for VegaFusion support
+ _chart_state = traitlets.Any(allow_none=True)
+ _js_watch_plan = traitlets.Any(allow_none=True).tag(sync=True)
+ _js_to_py_updates = traitlets.Any(allow_none=True).tag(sync=True)
+ _py_to_js_updates = traitlets.Any(allow_none=True).tag(sync=True)
+
+ # Track whether charts are configured for offline use
+ _is_offline = False
+
+ @classmethod
+ def enable_offline(cls, offline: bool = True):
+ """
+ Configure JupyterChart's offline behavior.
+
+ Parameters
+ ----------
+ offline: bool
+ If True, configure JupyterChart to operate in offline mode where JavaScript
+ dependencies are loaded from vl-convert.
+ If False, configure it to operate in online mode where JavaScript dependencies
+ are loaded from CDN dynamically. This is the default behavior.
+ """
+ from altair.utils._importers import import_vl_convert, vl_version_for_vl_convert
+
+ if offline:
+ if cls._is_offline:
+ # Already offline
+ return
+
+ vlc = import_vl_convert()
+
+ src_lines = load_js_src().split("\n")
+
+ # Remove leading lines with only whitespace, comments, or imports
+ while src_lines and (
+ len(src_lines[0].strip()) == 0
+ or src_lines[0].startswith("import")
+ or src_lines[0].startswith("//")
+ ):
+ src_lines.pop(0)
+
+ src = "\n".join(src_lines)
+
+ # vl-convert's javascript_bundle function creates a self-contained JavaScript bundle
+ # for JavaScript snippets that import from a small set of dependencies that
+ # vl-convert includes. To see the available imports and their imported names, run
+ # import vl_convert as vlc
+ # help(vlc.javascript_bundle)
+ bundled_src = vlc.javascript_bundle(
+ src, vl_version=vl_version_for_vl_convert()
+ )
+ cls._esm = bundled_src
+ cls._is_offline = True
+ else:
+ cls._esm = load_js_src()
+ cls._is_offline = False
+
+ def __init__(
+ self,
+ chart: TopLevelSpec,
+ debounce_wait: int = 10,
+ max_wait: bool = True,
+ debug: bool = False,
+ embed_options: dict | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Jupyter Widget for displaying and updating Altair Charts, and retrieving selection and parameter values.
+
+ Parameters
+ ----------
+ chart: Chart
+ Altair Chart instance
+ debounce_wait: int
+ Debouncing wait time in milliseconds. Updates will be sent from the client to the kernel
+ after debounce_wait milliseconds of no chart interactions.
+ max_wait: bool
+ If True (default), updates will be sent from the client to the kernel every debounce_wait
+ milliseconds even if there are ongoing chart interactions. If False, updates will not be
+ sent until chart interactions have completed.
+ debug: bool
+ If True, debug messages will be printed
+ embed_options: dict
+ Options to pass to vega-embed.
+ See https://github.com/vega/vega-embed?tab=readme-ov-file#options
+ """
+ self.params = Params({})
+ self.selections = Selections({})
+ super().__init__(
+ chart=chart,
+ debounce_wait=debounce_wait,
+ max_wait=max_wait,
+ debug=debug,
+ embed_options=embed_options,
+ **kwargs,
+ )
+
+ @traitlets.observe("chart")
+ def _on_change_chart(self, change): # noqa: C901
+ """Updates the JupyterChart's internal state when the wrapped Chart instance changes."""
+ new_chart = change.new
+ selection_watches = []
+ selection_types = {}
+ initial_params = {}
+ initial_vl_selections = {}
+ empty_selections = {}
+
+ if new_chart is None:
+ with self.hold_sync():
+ self.spec = None
+ self._selection_types = selection_types
+ self._vl_selections = initial_vl_selections
+ self._params = initial_params
+ return
+
+ params = getattr(new_chart, "params", [])
+
+ if params is not alt.Undefined:
+ for param in new_chart.params:
+ if isinstance(param.name, alt.ParameterName):
+ clean_name = param.name.to_json().strip('"')
+ else:
+ clean_name = param.name
+
+ select = getattr(param, "select", alt.Undefined)
+
+ if select != alt.Undefined:
+ if not isinstance(select, dict):
+ select = select.to_dict()
+
+ select_type = select["type"]
+ if select_type == "point":
+ if not (
+ select.get("fields", None) or select.get("encodings", None)
+ ):
+ # Point selection with no associated fields or encodings specified.
+ # This is an index-based selection
+ selection_types[clean_name] = "index"
+ empty_selections[clean_name] = IndexSelection(
+ name=clean_name, value=[], store=[]
+ )
+ else:
+ selection_types[clean_name] = "point"
+ empty_selections[clean_name] = PointSelection(
+ name=clean_name, value=[], store=[]
+ )
+ elif select_type == "interval":
+ selection_types[clean_name] = "interval"
+ empty_selections[clean_name] = IntervalSelection(
+ name=clean_name, value={}, store=[]
+ )
+ else:
+ msg = f"Unexpected selection type {select.type}"
+ raise ValueError(msg)
+ selection_watches.append(clean_name)
+ initial_vl_selections[clean_name] = {"value": None, "store": []}
+ else:
+ clean_value = param.value if param.value != alt.Undefined else None
+ initial_params[clean_name] = clean_value
+
+ # Handle the params generated by transforms
+ for param_name in collect_transform_params(new_chart):
+ initial_params[param_name] = None
+
+ # Setup params
+ self.params = Params(initial_params)
+
+ def on_param_traitlet_changed(param_change):
+ new_params = dict(self._params)
+ new_params[param_change["name"]] = param_change["new"]
+ self._params = new_params
+
+ self.params.observe(on_param_traitlet_changed)
+
+ # Setup selections
+ self.selections = Selections(empty_selections)
+
+ # Update properties all together
+ with self.hold_sync():
+ if using_vegafusion():
+ if self.local_tz is None:
+ self.spec = None
+
+ def on_local_tz_change(change):
+ self._init_with_vegafusion(change["new"])
+
+ self.observe(on_local_tz_change, ["local_tz"])
+ else:
+ self._init_with_vegafusion(self.local_tz)
+ else:
+ self.spec = new_chart.to_dict()
+ self._selection_types = selection_types
+ self._vl_selections = initial_vl_selections
+ self._params = initial_params
+
+ def _init_with_vegafusion(self, local_tz: str):
+ if self.chart is not None:
+ vegalite_spec = self.chart.to_dict(context={"pre_transform": False})
+ with self.hold_sync():
+ self._chart_state = compile_to_vegafusion_chart_state(
+ vegalite_spec, local_tz
+ )
+ self._js_watch_plan = self._chart_state.get_watch_plan()[
+ "client_to_server"
+ ]
+ self.spec = self._chart_state.get_transformed_spec()
+
+ # Callback to update chart state and send updates back to client
+ def on_js_to_py_updates(change):
+ if self.debug:
+ updates_str = json.dumps(change["new"], indent=2)
+ print(
+ f"JavaScript to Python VegaFusion updates:\n {updates_str}"
+ )
+ updates = self._chart_state.update(change["new"])
+ if self.debug:
+ updates_str = json.dumps(updates, indent=2)
+ print(
+ f"Python to JavaScript VegaFusion updates:\n {updates_str}"
+ )
+ self._py_to_js_updates = updates
+
+ self.observe(on_js_to_py_updates, ["_js_to_py_updates"])
+
+ @traitlets.observe("_params")
+ def _on_change_params(self, change):
+ for param_name, value in change.new.items():
+ setattr(self.params, param_name, value)
+
+ @traitlets.observe("_vl_selections")
+ def _on_change_selections(self, change):
+ """Updates the JupyterChart's public selections traitlet in response to changes that the JavaScript logic makes to the internal _selections traitlet."""
+ for selection_name, selection_dict in change.new.items():
+ value = selection_dict["value"]
+ store = selection_dict["store"]
+ selection_type = self._selection_types[selection_name]
+ if selection_type == "index":
+ self.selections._set_value(
+ selection_name,
+ IndexSelection.from_vega(selection_name, signal=value, store=store),
+ )
+ elif selection_type == "point":
+ self.selections._set_value(
+ selection_name,
+ PointSelection.from_vega(selection_name, signal=value, store=store),
+ )
+ elif selection_type == "interval":
+ self.selections._set_value(
+ selection_name,
+ IntervalSelection.from_vega(
+ selection_name, signal=value, store=store
+ ),
+ )
+
+
+def collect_transform_params(chart: TopLevelSpec) -> set[str]:
+ """
+ Collect the names of params that are defined by transforms.
+
+ Parameters
+ ----------
+ chart: Chart from which to extract transform params
+
+ Returns
+ -------
+ set of param names
+ """
+ transform_params = set()
+
+ # Handle recursive case
+ for prop in ("layer", "concat", "hconcat", "vconcat"):
+ for child in getattr(chart, prop, []):
+ transform_params.update(collect_transform_params(child))
+
+ # Handle chart's own transforms
+ transforms = getattr(chart, "transform", [])
+ transforms = transforms if transforms != alt.Undefined else []
+ for tx in transforms:
+ if hasattr(tx, "param"):
+ transform_params.add(tx.param)
+
+ return transform_params
diff --git a/videollama2/lib/python3.10/site-packages/altair/py.typed b/videollama2/lib/python3.10/site-packages/altair/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/videollama2/lib/python3.10/site-packages/altair/typing.py b/videollama2/lib/python3.10/site-packages/altair/typing.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd8cb14897b6148e197fb8cb3afdc4f512cf720a
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/altair/typing.py
@@ -0,0 +1,96 @@
+"""Public types to ease integrating with `altair`."""
+
+from __future__ import annotations
+
+__all__ = [
+ "ChannelAngle",
+ "ChannelColor",
+ "ChannelColumn",
+ "ChannelDescription",
+ "ChannelDetail",
+ "ChannelFacet",
+ "ChannelFill",
+ "ChannelFillOpacity",
+ "ChannelHref",
+ "ChannelKey",
+ "ChannelLatitude",
+ "ChannelLatitude2",
+ "ChannelLongitude",
+ "ChannelLongitude2",
+ "ChannelOpacity",
+ "ChannelOrder",
+ "ChannelRadius",
+ "ChannelRadius2",
+ "ChannelRow",
+ "ChannelShape",
+ "ChannelSize",
+ "ChannelStroke",
+ "ChannelStrokeDash",
+ "ChannelStrokeOpacity",
+ "ChannelStrokeWidth",
+ "ChannelText",
+ "ChannelTheta",
+ "ChannelTheta2",
+ "ChannelTooltip",
+ "ChannelUrl",
+ "ChannelX",
+ "ChannelX2",
+ "ChannelXError",
+ "ChannelXError2",
+ "ChannelXOffset",
+ "ChannelY",
+ "ChannelY2",
+ "ChannelYError",
+ "ChannelYError2",
+ "ChannelYOffset",
+ "ChartType",
+ "EncodeKwds",
+ "Optional",
+ "is_chart_type",
+]
+
+from altair.utils.schemapi import Optional
+from altair.vegalite.v5.api import ChartType, is_chart_type
+from altair.vegalite.v5.schema.channels import (
+ ChannelAngle,
+ ChannelColor,
+ ChannelColumn,
+ ChannelDescription,
+ ChannelDetail,
+ ChannelFacet,
+ ChannelFill,
+ ChannelFillOpacity,
+ ChannelHref,
+ ChannelKey,
+ ChannelLatitude,
+ ChannelLatitude2,
+ ChannelLongitude,
+ ChannelLongitude2,
+ ChannelOpacity,
+ ChannelOrder,
+ ChannelRadius,
+ ChannelRadius2,
+ ChannelRow,
+ ChannelShape,
+ ChannelSize,
+ ChannelStroke,
+ ChannelStrokeDash,
+ ChannelStrokeOpacity,
+ ChannelStrokeWidth,
+ ChannelText,
+ ChannelTheta,
+ ChannelTheta2,
+ ChannelTooltip,
+ ChannelUrl,
+ ChannelX,
+ ChannelX2,
+ ChannelXError,
+ ChannelXError2,
+ ChannelXOffset,
+ ChannelY,
+ ChannelY2,
+ ChannelYError,
+ ChannelYError2,
+ ChannelYOffset,
+ EncodeKwds,
+)
diff --git a/videollama2/lib/python3.10/site-packages/altair/utils/server.py b/videollama2/lib/python3.10/site-packages/altair/utils/server.py
new file mode 100644
index 0000000000000000000000000000000000000000..d773b7d25af75c4989d5bbf1594ea1f6c311649e
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/altair/utils/server.py
@@ -0,0 +1,151 @@
+"""
+A Simple server used to show altair graphics from a prompt or script.
+
+This is adapted from the mpld3 package; see
+https://github.com/mpld3/mpld3/blob/master/mpld3/_server.py
+"""
+
+import itertools
+import random
+import socket
+import sys
+import threading
+import webbrowser
+from http import server
+from io import BytesIO as IO
+
+JUPYTER_WARNING = """
+Note: if you're in the Jupyter notebook, Chart.serve() is not the best
+ way to view plots. Consider using Chart.display().
+You must interrupt the kernel to cancel this command.
+"""
+
+
+# Mock server used for testing
+
+
+class MockRequest:
+ def makefile(self, *args, **kwargs):
+ return IO(b"GET /")
+
+ def sendall(self, response):
+ pass
+
+
+class MockServer:
+ def __init__(self, ip_port, Handler):
+ Handler(MockRequest(), ip_port[0], self)
+
+ def serve_forever(self):
+ pass
+
+ def server_close(self):
+ pass
+
+
+def generate_handler(html, files=None):
+ if files is None:
+ files = {}
+
+ class MyHandler(server.BaseHTTPRequestHandler):
+ def do_GET(self):
+ """Respond to a GET request."""
+ if self.path == "/":
+ self.send_response(200)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ self.wfile.write(html.encode())
+ elif self.path in files:
+ content_type, content = files[self.path]
+ self.send_response(200)
+ self.send_header("Content-type", content_type)
+ self.end_headers()
+ self.wfile.write(content.encode())
+ else:
+ self.send_error(404)
+
+ return MyHandler
+
+
+def find_open_port(ip, port, n=50):
+ """Find an open port near the specified port."""
+ ports = itertools.chain(
+ (port + i for i in range(n)), (port + random.randint(-2 * n, 2 * n))
+ )
+
+ for port in ports:
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ result = s.connect_ex((ip, port))
+ s.close()
+ if result != 0:
+ return port
+ msg = "no open ports found"
+ raise ValueError(msg)
+
+
+def serve(
+ html,
+ ip="127.0.0.1",
+ port=8888,
+ n_retries=50,
+ files=None,
+ jupyter_warning=True,
+ open_browser=True,
+ http_server=None,
+) -> None:
+ """
+ Start a server serving the given HTML, and (optionally) open a browser.
+
+ Parameters
+ ----------
+ html : string
+ HTML to serve
+ ip : string (default = '127.0.0.1')
+ ip address at which the HTML will be served.
+ port : int (default = 8888)
+ the port at which to serve the HTML
+ n_retries : int (default = 50)
+ the number of nearby ports to search if the specified port is in use.
+ files : dictionary (optional)
+ dictionary of extra content to serve
+ jupyter_warning : bool (optional)
+ if True (default), then print a warning if this is used within Jupyter
+ open_browser : bool (optional)
+ if True (default), then open a web browser to the given HTML
+ http_server : class (optional)
+ optionally specify an HTTPServer class to use for showing the
+ figure. The default is Python's basic HTTPServer.
+ """
+ port = find_open_port(ip, port, n_retries)
+ Handler = generate_handler(html, files)
+
+ if http_server is None:
+ srvr = server.HTTPServer((ip, port), Handler)
+ else:
+ srvr = http_server((ip, port), Handler)
+
+ if jupyter_warning:
+ try:
+ __IPYTHON__ # type: ignore # noqa
+ except NameError:
+ pass
+ else:
+ print(JUPYTER_WARNING)
+
+ # Start the server
+ print(f"Serving to http://{ip}:{port}/ [Ctrl-C to exit]")
+ sys.stdout.flush()
+
+ if open_browser:
+ # Use a thread to open a web browser pointing to the server
+ def b():
+ return webbrowser.open(f"http://{ip}:{port}")
+
+ threading.Thread(target=b).start()
+
+ try:
+ srvr.serve_forever()
+ except (KeyboardInterrupt, SystemExit):
+ print("\nstopping Server...")
+
+ srvr.server_close()
diff --git a/videollama2/lib/python3.10/site-packages/nvidia/curand/include/__pycache__/__init__.cpython-310.pyc b/videollama2/lib/python3.10/site-packages/nvidia/curand/include/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1f65582e4ad5ca304efdd89c0c882e29084f77a8
Binary files /dev/null and b/videollama2/lib/python3.10/site-packages/nvidia/curand/include/__pycache__/__init__.cpython-310.pyc differ
diff --git a/videollama2/lib/python3.10/site-packages/nvidia/curand/include/curand_kernel.h b/videollama2/lib/python3.10/site-packages/nvidia/curand/include/curand_kernel.h
new file mode 100644
index 0000000000000000000000000000000000000000..c9d27c5f271cae931091c85ecfe37b5e7d427bf3
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/nvidia/curand/include/curand_kernel.h
@@ -0,0 +1,1677 @@
+
+ /* Copyright 2010-2014 NVIDIA Corporation. All rights reserved.
+ *
+ * NOTICE TO LICENSEE:
+ *
+ * The source code and/or documentation ("Licensed Deliverables") are
+ * subject to NVIDIA intellectual property rights under U.S. and
+ * international Copyright laws.
+ *
+ * The Licensed Deliverables contained herein are PROPRIETARY and
+ * CONFIDENTIAL to NVIDIA and are being provided under the terms and
+ * conditions of a form of NVIDIA software license agreement by and
+ * between NVIDIA and Licensee ("License Agreement") or electronically
+ * accepted by Licensee. Notwithstanding any terms or conditions to
+ * the contrary in the License Agreement, reproduction or disclosure
+ * of the Licensed Deliverables to any third party without the express
+ * written consent of NVIDIA is prohibited.
+ *
+ * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
+ * LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE
+ * SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE. THEY ARE
+ * PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
+ * NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED
+ * DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY,
+ * NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
+ * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
+ * LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY
+ * SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY
+ * DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+ * WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ * ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
+ * OF THESE LICENSED DELIVERABLES.
+ *
+ * U.S. Government End Users. These Licensed Deliverables are a
+ * "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT
+ * 1995), consisting of "commercial computer software" and "commercial
+ * computer software documentation" as such terms are used in 48
+ * C.F.R. 12.212 (SEPT 1995) and are provided to the U.S. Government
+ * only as a commercial end item. Consistent with 48 C.F.R.12.212 and
+ * 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all
+ * U.S. Government End Users acquire the Licensed Deliverables with
+ * only those rights set forth herein.
+ *
+ * Any use of the Licensed Deliverables in individual and commercial
+ * software must include, in the user documentation and internal
+ * comments to the code, the above Disclaimer and U.S. Government End
+ * Users Notice.
+ */
+
+
+#if !defined(CURAND_KERNEL_H_)
+#define CURAND_KERNEL_H_
+
+/**
+ * \defgroup DEVICE Device API
+ *
+ * @{
+ */
+
+#if !defined(QUALIFIERS)
+#define QUALIFIERS static __forceinline__ __device__
+#endif
+
+/* To prevent unused parameter warnings */
+#if !defined(GCC_UNUSED_PARAMETER)
+#if defined(__GNUC__)
+#define GCC_UNUSED_PARAMETER __attribute__((unused))
+#else
+#define GCC_UNUSED_PARAMETER
+#endif /* defined(__GNUC__) */
+#endif /* !defined(GCC_UNUSED_PARAMETER) */
+
+#include
+
+#ifdef __CUDACC_RTC__
+#define CURAND_DETAIL_USE_CUDA_STL
+#endif
+
+#if __cplusplus >= 201103L
+# ifdef CURAND_DETAIL_USE_CUDA_STL
+# define CURAND_STD cuda::std
+# include
+# else
+# define CURAND_STD std
+# include
+# endif // CURAND_DETAIL_USE_CUDA_STL
+#else
+// To support C++03 compilation
+# define CURAND_STD curand_detail
+namespace curand_detail {
+ template
+ struct enable_if {};
+
+ template
+ struct enable_if { typedef T type; };
+
+ template
+ struct is_same { static const bool value = false; };
+
+ template
+ struct is_same { static const bool value = true; };
+} // namespace curand_detail
+#endif // __cplusplus >= 201103L
+
+#ifndef __CUDACC_RTC__
+#include
+#endif // __CUDACC_RTC__
+
+#include "curand.h"
+#include "curand_discrete.h"
+#include "curand_precalc.h"
+#include "curand_mrg32k3a.h"
+#include "curand_mtgp32_kernel.h"
+#include "curand_philox4x32_x.h"
+#include "curand_globals.h"
+
+/* Test RNG */
+/* This generator uses the formula:
+ x_n = x_(n-1) + 1 mod 2^32
+ x_0 = (unsigned int)seed * 3
+ Subsequences are spaced 31337 steps apart.
+*/
+struct curandStateTest {
+ unsigned int v;
+};
+
+/** \cond UNHIDE_TYPEDEFS */
+typedef struct curandStateTest curandStateTest_t;
+/** \endcond */
+
+/* XORSHIFT FAMILY RNGs */
+/* These generators are a family proposed by Marsaglia. They keep state
+ in 32 bit chunks, then use repeated shift and xor operations to scramble
+ the bits. The following generators are a combination of a simple Weyl
+ generator with an N variable XORSHIFT generator.
+*/
+
+/* XORSHIFT RNG */
+/* This generator uses the xorwow formula of
+www.jstatsoft.org/v08/i14/paper page 5
+Has period 2^192 - 2^32.
+*/
+/**
+ * CURAND XORWOW state
+ */
+struct curandStateXORWOW;
+
+/*
+ * Implementation details not in reference documentation */
+struct curandStateXORWOW {
+ unsigned int d, v[5];
+ int boxmuller_flag;
+ int boxmuller_flag_double;
+ float boxmuller_extra;
+ double boxmuller_extra_double;
+};
+
+/*
+ * CURAND XORWOW state
+ */
+/** \cond UNHIDE_TYPEDEFS */
+typedef struct curandStateXORWOW curandStateXORWOW_t;
+
+#define EXTRA_FLAG_NORMAL 0x00000001
+#define EXTRA_FLAG_LOG_NORMAL 0x00000002
+/** \endcond */
+
+/* Combined Multiple Recursive Generators */
+/* These generators are a family proposed by L'Ecuyer. They keep state
+ in sets of doubles, then use repeated modular arithmetic multiply operations
+ to scramble the bits in each set, and combine the result.
+*/
+
+/* MRG32k3a RNG */
+/* This generator uses the MRG32k3A formula of
+http://www.iro.umontreal.ca/~lecuyer/myftp/streams00/c++/streams4.pdf
+Has period 2^191.
+*/
+
+/* moduli for the recursions */
+/** \cond UNHIDE_DEFINES */
+#define MRG32K3A_MOD1 4294967087.
+#define MRG32K3A_MOD2 4294944443.
+
+/* Constants used in generation */
+
+#define MRG32K3A_A12 1403580.
+#define MRG32K3A_A13N 810728.
+#define MRG32K3A_A21 527612.
+#define MRG32K3A_A23N 1370589.
+#define MRG32K3A_NORM (2.3283065498378288e-10)
+//
+// #define MRG32K3A_BITS_NORM ((double)((POW32_DOUBLE-1.0)/MOD1))
+// above constant, used verbatim, rounds differently on some host systems.
+#define MRG32K3A_BITS_NORM 1.000000048662
+
+/** \endcond */
+
+
+
+
+/**
+ * CURAND MRG32K3A state
+ */
+struct curandStateMRG32k3a;
+
+/* Implementation details not in reference documentation */
+struct curandStateMRG32k3a {
+ unsigned int s1[3];
+ unsigned int s2[3];
+ int boxmuller_flag;
+ int boxmuller_flag_double;
+ float boxmuller_extra;
+ double boxmuller_extra_double;
+};
+
+/*
+ * CURAND MRG32K3A state
+ */
+/** \cond UNHIDE_TYPEDEFS */
+typedef struct curandStateMRG32k3a curandStateMRG32k3a_t;
+/** \endcond */
+
+/* SOBOL QRNG */
+/**
+ * CURAND Sobol32 state
+ */
+struct curandStateSobol32;
+
+/* Implementation details not in reference documentation */
+struct curandStateSobol32 {
+ unsigned int i, x, c;
+ unsigned int direction_vectors[32];
+};
+
+/*
+ * CURAND Sobol32 state
+ */
+/** \cond UNHIDE_TYPEDEFS */
+typedef struct curandStateSobol32 curandStateSobol32_t;
+/** \endcond */
+
+/**
+ * CURAND Scrambled Sobol32 state
+ */
+struct curandStateScrambledSobol32;
+
+/* Implementation details not in reference documentation */
+struct curandStateScrambledSobol32 {
+ unsigned int i, x, c;
+ unsigned int direction_vectors[32];
+};
+
+/*
+ * CURAND Scrambled Sobol32 state
+ */
+/** \cond UNHIDE_TYPEDEFS */
+typedef struct curandStateScrambledSobol32 curandStateScrambledSobol32_t;
+/** \endcond */
+
+/**
+ * CURAND Sobol64 state
+ */
+struct curandStateSobol64;
+
+/* Implementation details not in reference documentation */
+struct curandStateSobol64 {
+ unsigned long long i, x, c;
+ unsigned long long direction_vectors[64];
+};
+
+/*
+ * CURAND Sobol64 state
+ */
+/** \cond UNHIDE_TYPEDEFS */
+typedef struct curandStateSobol64 curandStateSobol64_t;
+/** \endcond */
+
+/**
+ * CURAND Scrambled Sobol64 state
+ */
+struct curandStateScrambledSobol64;
+
+/* Implementation details not in reference documentation */
+struct curandStateScrambledSobol64 {
+ unsigned long long i, x, c;
+ unsigned long long direction_vectors[64];
+};
+
+/*
+ * CURAND Scrambled Sobol64 state
+ */
+/** \cond UNHIDE_TYPEDEFS */
+typedef struct curandStateScrambledSobol64 curandStateScrambledSobol64_t;
+/** \endcond */
+
+/*
+ * Default RNG
+ */
+/** \cond UNHIDE_TYPEDEFS */
+typedef struct curandStateXORWOW curandState_t;
+typedef struct curandStateXORWOW curandState;
+/** \endcond */
+
+/****************************************************************************/
+/* Utility functions needed by RNGs */
+/****************************************************************************/
+/** \cond UNHIDE_UTILITIES */
+/*
+ multiply vector by matrix, store in result
+ matrix is n x n, measured in 32 bit units
+ matrix is stored in row major order
+ vector and result cannot be same pointer
+*/
+template
+QUALIFIERS void __curand_matvec_inplace(unsigned int *vector, unsigned int *matrix)
+{
+ unsigned int result[N] = { 0 };
+ for(int i = 0; i < N; i++) {
+ #ifdef __CUDA_ARCH__
+ #pragma unroll 16
+ #endif
+ for(int j = 0; j < 32; j++) {
+ if(vector[i] & (1 << j)) {
+ for(int k = 0; k < N; k++) {
+ result[k] ^= matrix[N * (i * 32 + j) + k];
+ }
+ }
+ }
+ }
+ for(int i = 0; i < N; i++) {
+ vector[i] = result[i];
+ }
+}
+
+QUALIFIERS void __curand_matvec(unsigned int *vector, unsigned int *matrix,
+ unsigned int *result, int n)
+{
+ for(int i = 0; i < n; i++) {
+ result[i] = 0;
+ }
+ for(int i = 0; i < n; i++) {
+ for(int j = 0; j < 32; j++) {
+ if(vector[i] & (1 << j)) {
+ for(int k = 0; k < n; k++) {
+ result[k] ^= matrix[n * (i * 32 + j) + k];
+ }
+ }
+ }
+ }
+}
+
+/* generate identity matrix */
+QUALIFIERS void __curand_matidentity(unsigned int *matrix, int n)
+{
+ int r;
+ for(int i = 0; i < n * 32; i++) {
+ for(int j = 0; j < n; j++) {
+ r = i & 31;
+ if(i / 32 == j) {
+ matrix[i * n + j] = (1 << r);
+ } else {
+ matrix[i * n + j] = 0;
+ }
+ }
+ }
+}
+
+/* multiply matrixA by matrixB, store back in matrixA
+ matrixA and matrixB must not be same matrix */
+QUALIFIERS void __curand_matmat(unsigned int *matrixA, unsigned int *matrixB, int n)
+{
+ unsigned int result[MAX_XOR_N];
+ for(int i = 0; i < n * 32; i++) {
+ __curand_matvec(matrixA + i * n, matrixB, result, n);
+ for(int j = 0; j < n; j++) {
+ matrixA[i * n + j] = result[j];
+ }
+ }
+}
+
+/* copy vectorA to vector */
+QUALIFIERS void __curand_veccopy(unsigned int *vector, unsigned int *vectorA, int n)
+{
+ for(int i = 0; i < n; i++) {
+ vector[i] = vectorA[i];
+ }
+}
+
+/* copy matrixA to matrix */
+QUALIFIERS void __curand_matcopy(unsigned int *matrix, unsigned int *matrixA, int n)
+{
+ for(int i = 0; i < n * n * 32; i++) {
+ matrix[i] = matrixA[i];
+ }
+}
+
+/* compute matrixA to power p, store result in matrix */
+QUALIFIERS void __curand_matpow(unsigned int *matrix, unsigned int *matrixA,
+ unsigned long long p, int n)
+{
+ unsigned int matrixR[MAX_XOR_N * MAX_XOR_N * 32];
+ unsigned int matrixS[MAX_XOR_N * MAX_XOR_N * 32];
+ __curand_matidentity(matrix, n);
+ __curand_matcopy(matrixR, matrixA, n);
+ while(p) {
+ if(p & 1) {
+ __curand_matmat(matrix, matrixR, n);
+ }
+ __curand_matcopy(matrixS, matrixR, n);
+ __curand_matmat(matrixR, matrixS, n);
+ p >>= 1;
+ }
+}
+
+/****************************************************************************/
+/* Utility functions needed by MRG32k3a RNG */
+/* Matrix operations modulo some integer less than 2**32, done in */
+/* double precision floating point, with care not to overflow 53 bits */
+/****************************************************************************/
+
+/* return i mod m. */
+/* assumes i and m are integers represented accurately in doubles */
+
+QUALIFIERS double curand_MRGmod(double i, double m)
+{
+ double quo;
+ double rem;
+ quo = floor(i/m);
+ rem = i - (quo*m);
+ if (rem < 0.0) rem += m;
+ return rem;
+}
+
+/* Multiplication modulo m. Inputs i and j less than 2**32 */
+/* Ensure intermediate results do not exceed 2**53 */
+
+QUALIFIERS double curand_MRGmodMul(double i, double j, double m)
+{
+ double tempHi;
+ double tempLo;
+
+ tempHi = floor(i/131072.0);
+ tempLo = i - (tempHi*131072.0);
+ tempLo = curand_MRGmod( curand_MRGmod( (tempHi * j), m) * 131072.0 + curand_MRGmod(tempLo * j, m),m);
+
+ if (tempLo < 0.0) tempLo += m;
+ return tempLo;
+}
+
+/* multiply 3 by 3 matrices of doubles, modulo m */
+
+QUALIFIERS void curand_MRGmatMul3x3(unsigned int i1[][3],unsigned int i2[][3],unsigned int o[][3],double m)
+{
+ int i,j;
+ double temp[3][3];
+ for (i=0; i<3; i++){
+ for (j=0; j<3; j++){
+ temp[i][j] = ( curand_MRGmodMul(i1[i][0], i2[0][j], m) +
+ curand_MRGmodMul(i1[i][1], i2[1][j], m) +
+ curand_MRGmodMul(i1[i][2], i2[2][j], m));
+ temp[i][j] = curand_MRGmod( temp[i][j], m );
+ }
+ }
+ for (i=0; i<3; i++){
+ for (j=0; j<3; j++){
+ o[i][j] = (unsigned int)temp[i][j];
+ }
+ }
+}
+
+/* multiply 3 by 3 matrix times 3 by 1 vector of doubles, modulo m */
+
+QUALIFIERS void curand_MRGmatVecMul3x3( unsigned int i[][3], unsigned int v[], double m)
+{
+ int k;
+ double t[3];
+ for (k = 0; k < 3; k++) {
+ t[k] = ( curand_MRGmodMul(i[k][0], v[0], m) +
+ curand_MRGmodMul(i[k][1], v[1], m) +
+ curand_MRGmodMul(i[k][2], v[2], m) );
+ t[k] = curand_MRGmod( t[k], m );
+ }
+ for (k = 0; k < 3; k++) {
+ v[k] = (unsigned int)t[k];
+ }
+
+}
+
+/* raise a 3 by 3 matrix of doubles to a 64 bit integer power pow, modulo m */
+/* input is index zero of an array of 3 by 3 matrices m, */
+/* each m = m[0]**(2**index) */
+
+QUALIFIERS void curand_MRGmatPow3x3( unsigned int in[][3][3], unsigned int o[][3], double m, unsigned long long pow )
+{
+ int i,j;
+ for ( i = 0; i < 3; i++ ) {
+ for ( j = 0; j < 3; j++ ) {
+ o[i][j] = 0;
+ if ( i == j ) o[i][j] = 1;
+ }
+ }
+ i = 0;
+ curand_MRGmatVecMul3x3(o,o[0],m);
+ while (pow) {
+ if ( pow & 1ll ) {
+ curand_MRGmatMul3x3(in[i], o, o, m);
+ }
+ i++;
+ pow >>= 1;
+ }
+}
+
+/* raise a 3 by 3 matrix of doubles to the power */
+/* 2 to the power (pow modulo 191), modulo m */
+
+QUALIFIERS void curnand_MRGmatPow2Pow3x3( double in[][3], double o[][3], double m, unsigned long pow )
+{
+ unsigned int temp[3][3];
+ int i,j;
+ pow = pow % 191;
+ for ( i = 0; i < 3; i++ ) {
+ for ( j = 0; j < 3; j++ ) {
+ temp[i][j] = (unsigned int)in[i][j];
+ }
+ }
+ while (pow) {
+ curand_MRGmatMul3x3(temp, temp, temp, m);
+ pow--;
+ }
+ for ( i = 0; i < 3; i++ ) {
+ for ( j = 0; j < 3; j++ ) {
+ o[i][j] = temp[i][j];
+ }
+ }
+}
+
+/** \endcond */
+
+/****************************************************************************/
+/* Kernel implementations of RNGs */
+/****************************************************************************/
+
+/* Test RNG */
+
+QUALIFIERS void curand_init(unsigned long long seed,
+ unsigned long long subsequence,
+ unsigned long long offset,
+ curandStateTest_t *state)
+{
+ state->v = (unsigned int)(seed * 3) + (unsigned int)(subsequence * 31337) + \
+ (unsigned int)offset;
+}
+
+
+QUALIFIERS unsigned int curand(curandStateTest_t *state)
+{
+ unsigned int r = state->v++;
+ return r;
+}
+
+QUALIFIERS void skipahead(unsigned long long n, curandStateTest_t *state)
+{
+ state->v += (unsigned int)n;
+}
+
+/* XORWOW RNG */
+
+template
+QUALIFIERS void __curand_generate_skipahead_matrix_xor(unsigned int matrix[])
+{
+ T state;
+ // Generate matrix that advances one step
+ // matrix has n * n * 32 32-bit elements
+ // solve for matrix by stepping single bit states
+ for(int i = 0; i < 32 * n; i++) {
+ state.d = 0;
+ for(int j = 0; j < n; j++) {
+ state.v[j] = 0;
+ }
+ state.v[i / 32] = (1 << (i & 31));
+ curand(&state);
+ for(int j = 0; j < n; j++) {
+ matrix[i * n + j] = state.v[j];
+ }
+ }
+}
+
+template
+QUALIFIERS void _skipahead_scratch(unsigned long long x, T *state, unsigned int *scratch)
+{
+ // unsigned int matrix[n * n * 32];
+ unsigned int *matrix = scratch;
+ // unsigned int matrixA[n * n * 32];
+ unsigned int *matrixA = scratch + (n * n * 32);
+ // unsigned int vector[n];
+ unsigned int *vector = scratch + (n * n * 32) + (n * n * 32);
+ // unsigned int result[n];
+ unsigned int *result = scratch + (n * n * 32) + (n * n * 32) + n;
+ unsigned long long p = x;
+ for(int i = 0; i < n; i++) {
+ vector[i] = state->v[i];
+ }
+ int matrix_num = 0;
+ while(p && (matrix_num < PRECALC_NUM_MATRICES - 1)) {
+ for(unsigned int t = 0; t < (p & PRECALC_BLOCK_MASK); t++) {
+NV_IF_ELSE_TARGET(NV_IS_DEVICE,
+ __curand_matvec(vector, precalc_xorwow_offset_matrix[matrix_num], result, n);
+,
+ __curand_matvec(vector, precalc_xorwow_offset_matrix_host[matrix_num], result, n);
+)
+ __curand_veccopy(vector, result, n);
+ }
+ p >>= PRECALC_BLOCK_SIZE;
+ matrix_num++;
+ }
+ if(p) {
+NV_IF_ELSE_TARGET(NV_IS_DEVICE,
+ __curand_matcopy(matrix, precalc_xorwow_offset_matrix[PRECALC_NUM_MATRICES - 1], n);
+ __curand_matcopy(matrixA, precalc_xorwow_offset_matrix[PRECALC_NUM_MATRICES - 1], n);
+,
+ __curand_matcopy(matrix, precalc_xorwow_offset_matrix_host[PRECALC_NUM_MATRICES - 1], n);
+ __curand_matcopy(matrixA, precalc_xorwow_offset_matrix_host[PRECALC_NUM_MATRICES - 1], n);
+)
+ }
+ while(p) {
+ for(unsigned int t = 0; t < (p & SKIPAHEAD_MASK); t++) {
+ __curand_matvec(vector, matrixA, result, n);
+ __curand_veccopy(vector, result, n);
+ }
+ p >>= SKIPAHEAD_BLOCKSIZE;
+ if(p) {
+ for(int i = 0; i < SKIPAHEAD_BLOCKSIZE; i++) {
+ __curand_matmat(matrix, matrixA, n);
+ __curand_matcopy(matrixA, matrix, n);
+ }
+ }
+ }
+ for(int i = 0; i < n; i++) {
+ state->v[i] = vector[i];
+ }
+ state->d += 362437 * (unsigned int)x;
+}
+
+template
+QUALIFIERS void _skipahead_sequence_scratch(unsigned long long x, T *state, unsigned int *scratch)
+{
+ // unsigned int matrix[n * n * 32];
+ unsigned int *matrix = scratch;
+ // unsigned int matrixA[n * n * 32];
+ unsigned int *matrixA = scratch + (n * n * 32);
+ // unsigned int vector[n];
+ unsigned int *vector = scratch + (n * n * 32) + (n * n * 32);
+ // unsigned int result[n];
+ unsigned int *result = scratch + (n * n * 32) + (n * n * 32) + n;
+ unsigned long long p = x;
+ for(int i = 0; i < n; i++) {
+ vector[i] = state->v[i];
+ }
+ int matrix_num = 0;
+ while(p && matrix_num < PRECALC_NUM_MATRICES - 1) {
+ for(unsigned int t = 0; t < (p & PRECALC_BLOCK_MASK); t++) {
+NV_IF_ELSE_TARGET(NV_IS_DEVICE,
+ __curand_matvec(vector, precalc_xorwow_matrix[matrix_num], result, n);
+,
+ __curand_matvec(vector, precalc_xorwow_matrix_host[matrix_num], result, n);
+)
+ __curand_veccopy(vector, result, n);
+ }
+ p >>= PRECALC_BLOCK_SIZE;
+ matrix_num++;
+ }
+ if(p) {
+NV_IF_ELSE_TARGET(NV_IS_DEVICE,
+ __curand_matcopy(matrix, precalc_xorwow_matrix[PRECALC_NUM_MATRICES - 1], n);
+ __curand_matcopy(matrixA, precalc_xorwow_matrix[PRECALC_NUM_MATRICES - 1], n);
+,
+ __curand_matcopy(matrix, precalc_xorwow_matrix_host[PRECALC_NUM_MATRICES - 1], n);
+ __curand_matcopy(matrixA, precalc_xorwow_matrix_host[PRECALC_NUM_MATRICES - 1], n);
+)
+ }
+ while(p) {
+ for(unsigned int t = 0; t < (p & SKIPAHEAD_MASK); t++) {
+ __curand_matvec(vector, matrixA, result, n);
+ __curand_veccopy(vector, result, n);
+ }
+ p >>= SKIPAHEAD_BLOCKSIZE;
+ if(p) {
+ for(int i = 0; i < SKIPAHEAD_BLOCKSIZE; i++) {
+ __curand_matmat(matrix, matrixA, n);
+ __curand_matcopy(matrixA, matrix, n);
+ }
+ }
+ }
+ for(int i = 0; i < n; i++) {
+ state->v[i] = vector[i];
+ }
+ /* No update of state->d needed, guaranteed to be a multiple of 2^32 */
+}
+
+template
+QUALIFIERS void _skipahead_inplace(const unsigned long long x, T *state)
+{
+ unsigned long long p = x;
+ int matrix_num = 0;
+ while(p) {
+ for(unsigned int t = 0; t < (p & PRECALC_BLOCK_MASK); t++) {
+NV_IF_ELSE_TARGET(NV_IS_DEVICE,
+ __curand_matvec_inplace(state->v, precalc_xorwow_offset_matrix[matrix_num]);
+,
+ __curand_matvec_inplace(state->v, precalc_xorwow_offset_matrix_host[matrix_num]);
+)
+ }
+ p >>= PRECALC_BLOCK_SIZE;
+ matrix_num++;
+ }
+ state->d += 362437 * (unsigned int)x;
+}
+
+template
+QUALIFIERS void _skipahead_sequence_inplace(unsigned long long x, T *state)
+{
+ int matrix_num = 0;
+ while(x) {
+ for(unsigned int t = 0; t < (x & PRECALC_BLOCK_MASK); t++) {
+NV_IF_ELSE_TARGET(NV_IS_DEVICE,
+ __curand_matvec_inplace(state->v, precalc_xorwow_matrix[matrix_num]);
+,
+ __curand_matvec_inplace(state->v, precalc_xorwow_matrix_host[matrix_num]);
+)
+ }
+ x >>= PRECALC_BLOCK_SIZE;
+ matrix_num++;
+ }
+ /* No update of state->d needed, guaranteed to be a multiple of 2^32 */
+}
+
+/**
+ * \brief Update XORWOW state to skip \p n elements.
+ *
+ * Update the XORWOW state in \p state to skip ahead \p n elements.
+ *
+ * All values of \p n are valid. Large values require more computation and so
+ * will take more time to complete.
+ *
+ * \param n - Number of elements to skip
+ * \param state - Pointer to state to update
+ */
+QUALIFIERS void skipahead(unsigned long long n, curandStateXORWOW_t *state)
+{
+ _skipahead_inplace(n, state);
+}
+
+/**
+ * \brief Update XORWOW state to skip ahead \p n subsequences.
+ *
+ * Update the XORWOW state in \p state to skip ahead \p n subsequences. Each
+ * subsequence is \xmlonly267\endxmlonly elements long, so this means the function will skip ahead
+ * \xmlonly267\endxmlonly * n elements.
+ *
+ * All values of \p n are valid. Large values require more computation and so
+ * will take more time to complete.
+ *
+ * \param n - Number of subsequences to skip
+ * \param state - Pointer to state to update
+ */
+QUALIFIERS void skipahead_sequence(unsigned long long n, curandStateXORWOW_t *state)
+{
+ _skipahead_sequence_inplace(n, state);
+}
+
+QUALIFIERS void _curand_init_scratch(unsigned long long seed,
+ unsigned long long subsequence,
+ unsigned long long offset,
+ curandStateXORWOW_t *state,
+ unsigned int *scratch)
+{
+ // Break up seed, apply salt
+ // Constants are arbitrary nonzero values
+ unsigned int s0 = ((unsigned int)seed) ^ 0xaad26b49UL;
+ unsigned int s1 = (unsigned int)(seed >> 32) ^ 0xf7dcefddUL;
+ // Simple multiplication to mix up bits
+ // Constants are arbitrary odd values
+ unsigned int t0 = 1099087573UL * s0;
+ unsigned int t1 = 2591861531UL * s1;
+ state->d = 6615241 + t1 + t0;
+ state->v[0] = 123456789UL + t0;
+ state->v[1] = 362436069UL ^ t0;
+ state->v[2] = 521288629UL + t1;
+ state->v[3] = 88675123UL ^ t1;
+ state->v[4] = 5783321UL + t0;
+ _skipahead_sequence_scratch(subsequence, state, scratch);
+ _skipahead_scratch(offset, state, scratch);
+ state->boxmuller_flag = 0;
+ state->boxmuller_flag_double = 0;
+ state->boxmuller_extra = 0.f;
+ state->boxmuller_extra_double = 0.;
+}
+
+QUALIFIERS void _curand_init_inplace(unsigned long long seed,
+ unsigned long long subsequence,
+ unsigned long long offset,
+ curandStateXORWOW_t *state)
+{
+ // Break up seed, apply salt
+ // Constants are arbitrary nonzero values
+ unsigned int s0 = ((unsigned int)seed) ^ 0xaad26b49UL;
+ unsigned int s1 = (unsigned int)(seed >> 32) ^ 0xf7dcefddUL;
+ // Simple multiplication to mix up bits
+ // Constants are arbitrary odd values
+ unsigned int t0 = 1099087573UL * s0;
+ unsigned int t1 = 2591861531UL * s1;
+ state->d = 6615241 + t1 + t0;
+ state->v[0] = 123456789UL + t0;
+ state->v[1] = 362436069UL ^ t0;
+ state->v[2] = 521288629UL + t1;
+ state->v[3] = 88675123UL ^ t1;
+ state->v[4] = 5783321UL + t0;
+ _skipahead_sequence_inplace(subsequence, state);
+ _skipahead_inplace(offset, state);
+ state->boxmuller_flag = 0;
+ state->boxmuller_flag_double = 0;
+ state->boxmuller_extra = 0.f;
+ state->boxmuller_extra_double = 0.;
+}
+
+/**
+ * \brief Initialize XORWOW state.
+ *
+ * Initialize XORWOW state in \p state with the given \p seed, \p subsequence,
+ * and \p offset.
+ *
+ * All input values of \p seed, \p subsequence, and \p offset are legal. Large
+ * values for \p subsequence and \p offset require more computation and so will
+ * take more time to complete.
+ *
+ * A value of 0 for \p seed sets the state to the values of the original
+ * published version of the \p xorwow algorithm.
+ *
+ * \param seed - Arbitrary bits to use as a seed
+ * \param subsequence - Subsequence to start at
+ * \param offset - Absolute offset into sequence
+ * \param state - Pointer to state to initialize
+ */
+QUALIFIERS void curand_init(unsigned long long seed,
+ unsigned long long subsequence,
+ unsigned long long offset,
+ curandStateXORWOW_t *state)
+{
+ _curand_init_inplace(seed, subsequence, offset, state);
+}
+
+/**
+ * \brief Return 32-bits of pseudorandomness from an XORWOW generator.
+ *
+ * Return 32-bits of pseudorandomness from the XORWOW generator in \p state,
+ * increment position of generator by one.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return 32-bits of pseudorandomness as an unsigned int, all bits valid to use.
+ */
+QUALIFIERS unsigned int curand(curandStateXORWOW_t *state)
+{
+ unsigned int t;
+ t = (state->v[0] ^ (state->v[0] >> 2));
+ state->v[0] = state->v[1];
+ state->v[1] = state->v[2];
+ state->v[2] = state->v[3];
+ state->v[3] = state->v[4];
+ state->v[4] = (state->v[4] ^ (state->v[4] <<4)) ^ (t ^ (t << 1));
+ state->d += 362437;
+ return state->v[4] + state->d;
+}
+
+
+/**
+ * \brief Return 32-bits of pseudorandomness from an Philox4_32_10 generator.
+ *
+ * Return 32-bits of pseudorandomness from the Philox4_32_10 generator in \p state,
+ * increment position of generator by one.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return 32-bits of pseudorandomness as an unsigned int, all bits valid to use.
+ */
+
+QUALIFIERS unsigned int curand(curandStatePhilox4_32_10_t *state)
+{
+ // Maintain the invariant: output[STATE] is always "good" and
+ // is the next value to be returned by curand.
+ unsigned int ret;
+ switch(state->STATE++){
+ default:
+ ret = state->output.x;
+ break;
+ case 1:
+ ret = state->output.y;
+ break;
+ case 2:
+ ret = state->output.z;
+ break;
+ case 3:
+ ret = state->output.w;
+ break;
+ }
+ if(state->STATE == 4){
+ Philox_State_Incr(state);
+ state->output = curand_Philox4x32_10(state->ctr,state->key);
+ state->STATE = 0;
+ }
+ return ret;
+}
+
+/**
+ * \brief Return tuple of 4 32-bit pseudorandoms from a Philox4_32_10 generator.
+ *
+ * Return 128 bits of pseudorandomness from the Philox4_32_10 generator in \p state,
+ * increment position of generator by four.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return 128-bits of pseudorandomness as a uint4, all bits valid to use.
+ */
+
+QUALIFIERS uint4 curand4(curandStatePhilox4_32_10_t *state)
+{
+ uint4 r;
+
+ uint4 tmp = state->output;
+ Philox_State_Incr(state);
+ state->output= curand_Philox4x32_10(state->ctr,state->key);
+ switch(state->STATE){
+ case 0:
+ return tmp;
+ case 1:
+ r.x = tmp.y;
+ r.y = tmp.z;
+ r.z = tmp.w;
+ r.w = state->output.x;
+ break;
+ case 2:
+ r.x = tmp.z;
+ r.y = tmp.w;
+ r.z = state->output.x;
+ r.w = state->output.y;
+ break;
+ case 3:
+ r.x = tmp.w;
+ r.y = state->output.x;
+ r.z = state->output.y;
+ r.w = state->output.z;
+ break;
+ default:
+ // NOT possible but needed to avoid compiler warnings
+ return tmp;
+ }
+ return r;
+}
+
+/**
+ * \brief Update Philox4_32_10 state to skip \p n elements.
+ *
+ * Update the Philox4_32_10 state in \p state to skip ahead \p n elements.
+ *
+ * All values of \p n are valid.
+ *
+ * \param n - Number of elements to skip
+ * \param state - Pointer to state to update
+ */
+QUALIFIERS void skipahead(unsigned long long n, curandStatePhilox4_32_10_t *state)
+{
+ state->STATE += (n & 3);
+ n /= 4;
+ if( state->STATE > 3 ){
+ n += 1;
+ state->STATE -= 4;
+ }
+ Philox_State_Incr(state, n);
+ state->output = curand_Philox4x32_10(state->ctr,state->key);
+}
+
+/**
+ * \brief Update Philox4_32_10 state to skip ahead \p n subsequences.
+ *
+ * Update the Philox4_32_10 state in \p state to skip ahead \p n subsequences. Each
+ * subsequence is \xmlonly266\endxmlonly elements long, so this means the function will skip ahead
+ * \xmlonly266\endxmlonly * n elements.
+ *
+ * All values of \p n are valid.
+ *
+ * \param n - Number of subsequences to skip
+ * \param state - Pointer to state to update
+ */
+QUALIFIERS void skipahead_sequence(unsigned long long n, curandStatePhilox4_32_10_t *state)
+{
+ Philox_State_Incr_hi(state, n);
+ state->output = curand_Philox4x32_10(state->ctr,state->key);
+}
+
+/**
+ * \brief Initialize Philox4_32_10 state.
+ *
+ * Initialize Philox4_32_10 state in \p state with the given \p seed, p\ subsequence,
+ * and \p offset.
+ *
+ * All input values for \p seed, \p subseqence and \p offset are legal. Each of the
+ * \xmlonly264\endxmlonly possible
+ * values of seed selects an independent sequence of length
+ * \xmlonly2130\endxmlonly.
+ * The first
+ * \xmlonly266 * subsequence + offset\endxmlonly.
+ * values of the sequence are skipped.
+ * I.e., subsequences are of length
+ * \xmlonly266\endxmlonly.
+ *
+ * \param seed - Arbitrary bits to use as a seed
+ * \param subsequence - Subsequence to start at
+ * \param offset - Absolute offset into subsequence
+ * \param state - Pointer to state to initialize
+ */
+QUALIFIERS void curand_init(unsigned long long seed,
+ unsigned long long subsequence,
+ unsigned long long offset,
+ curandStatePhilox4_32_10_t *state)
+{
+ state->ctr = make_uint4(0, 0, 0, 0);
+ state->key.x = (unsigned int)seed;
+ state->key.y = (unsigned int)(seed>>32);
+ state->STATE = 0;
+ state->boxmuller_flag = 0;
+ state->boxmuller_flag_double = 0;
+ state->boxmuller_extra = 0.f;
+ state->boxmuller_extra_double = 0.;
+ skipahead_sequence(subsequence, state);
+ skipahead(offset, state);
+}
+
+
+/* MRG32k3a RNG */
+
+/* Base generator for MRG32k3a */
+QUALIFIERS unsigned long long __curand_umad(GCC_UNUSED_PARAMETER unsigned int a, GCC_UNUSED_PARAMETER unsigned int b, GCC_UNUSED_PARAMETER unsigned long long c)
+{
+ unsigned long long r = 0;
+NV_IF_TARGET(NV_PROVIDES_SM_61,
+ asm("mad.wide.u32 %0, %1, %2, %3;"
+ : "=l"(r) : "r"(a), "r"(b), "l"(c));
+)
+ return r;
+}
+QUALIFIERS unsigned long long __curand_umul(GCC_UNUSED_PARAMETER unsigned int a, GCC_UNUSED_PARAMETER unsigned int b)
+{
+ unsigned long long r = 0;
+NV_IF_TARGET(NV_PROVIDES_SM_61,
+ asm("mul.wide.u32 %0, %1, %2;"
+ : "=l"(r) : "r"(a), "r"(b));
+)
+ return r;
+}
/* Advance the combined MRG32k3a generator one step and return the raw
 * combined draw (a double in (0, m1]).  Three target-specific
 * implementations follow: an exact integer path for sm_61+ devices, a
 * double/fma path for other devices, and a host fallback at the bottom
 * (the NV_IF_TARGET bodies expand to nothing on host builds). */
QUALIFIERS double curand_MRG32k3a (curandStateMRG32k3a_t *state)
{
NV_IF_TARGET(NV_PROVIDES_SM_61,
    /* Moduli and recurrence coefficients of the two component MRGs. */
    const unsigned int m1 = 4294967087u;
    const unsigned int m2 = 4294944443u;
    /* m1c = 2^32 - m1 and m2c = 2^32 - m2; used to fold a 64-bit value
       back toward the modulus without any division. */
    const unsigned int m1c = 209u;
    const unsigned int m2c = 22853u;
    const unsigned int a12 = 1403580u;
    const unsigned int a13n = 810728u;   /* negated coefficient */
    const unsigned int a21 = 527612u;
    const unsigned int a23n = 1370589u;  /* negated coefficient */

    unsigned long long p1;
    unsigned long long p2;
    /* First component: a12*s1[1] - a13n*s1[0] (mod m1); the subtraction
       is avoided by multiplying a13n with (m1 - s1[0]) instead. */
    const unsigned long long p3 = __curand_umul(a13n, m1 - state->s1[0]);
    p1 = __curand_umad(a12, state->s1[1], p3);

    // Putting addition inside and changing umul to umad
    // slowed this function down on GV100
    /* One fold suffices: m1c is small so the folded p1 is below 2*m1. */
    p1 = __curand_umul(p1 >> 32, m1c) + (p1 & 0xffffffff);
    if (p1 >= m1) p1 -= m1;

    state->s1[0] = state->s1[1]; state->s1[1] = state->s1[2]; state->s1[2] = p1;
    /* Second component: a21*s2[2] - a23n*s2[0] (mod m2). */
    const unsigned long long p4 = __curand_umul(a23n, m2 - state->s2[0]);
    p2 = __curand_umad(a21, state->s2[2], p4);

    // Putting addition inside and changing umul to umad
    // slowed this function down on GV100
    /* Two folds here: m2c is ~2^14.5 so one fold can leave p2 near 2^35;
       the second fold brings it below 2*m2. */
    p2 = __curand_umul(p2 >> 32, m2c) + (p2 & 0xffffffff);
    p2 = __curand_umul(p2 >> 32, m2c) + (p2 & 0xffffffff);
    if (p2 >= m2) p2 -= m2;

    state->s2[0] = state->s2[1]; state->s2[1] = state->s2[2]; state->s2[2] = p2;

    /* Combine: (p1 - p2) mod m1 via 32-bit wraparound subtraction. */
    const unsigned int p5 = (unsigned int)p1 - (unsigned int)p2;
    if(p1 <= p2) return p5 + m1;
    return p5;
)
NV_IF_TARGET(NV_IS_DEVICE,
/* nj's implementation */
    const double m1 = 4294967087.;
    const double m2 = 4294944443.;
    const double a12 = 1403580.;
    const double a13n = 810728.;   /* negated coefficient */
    const double a21 = 527612.;
    const double a23n = 1370589.;  /* negated coefficient */

    /* High/low double-double halves of 1/m1 and 1/m2; the fma below
       yields an exact truncated quotient for the modular reduction. */
    const double rh1 = 2.3283065498378290e-010; /* (1.0 / m1)__hi */
    const double rl1 = -1.7354913086174288e-026; /* (1.0 / m1)__lo */
    const double rh2 = 2.3283188252407387e-010; /* (1.0 / m2)__hi */
    const double rl2 = 2.4081018096503646e-026; /* (1.0 / m2)__lo */

    double q;
    double p1;
    double p2;
    /* First component recurrence followed by reduction mod m1. */
    p1 = a12 * state->s1[1] - a13n * state->s1[0];
    q = trunc (fma (p1, rh1, p1 * rl1));
    p1 -= q * m1;
    if (p1 < 0.0) p1 += m1;   /* single correction step after the estimate */
    state->s1[0] = state->s1[1]; state->s1[1] = state->s1[2]; state->s1[2] = (unsigned int)p1;
    /* Second component recurrence followed by reduction mod m2. */
    p2 = a21 * state->s2[2] - a23n * state->s2[0];
    q = trunc (fma (p2, rh2, p2 * rl2));
    p2 -= q * m2;
    if (p2 < 0.0) p2 += m2;
    state->s2[0] = state->s2[1]; state->s2[1] = state->s2[2]; state->s2[2] = (unsigned int)p2;
    /* Combine into (0, m1]. */
    if (p1 <= p2) return (p1 - p2 + m1);
    else return (p1 - p2);
)
/* end nj's implementation */
/* Host fallback: same recurrences via curand_MRGmod.  Reached only when
   the NV_IF_TARGET branches above were compiled out. */
    double p1;
    double p2;
    double r;
    p1 = (MRG32K3A_A12 * state->s1[1]) - (MRG32K3A_A13N * state->s1[0]);
    p1 = curand_MRGmod(p1, MRG32K3A_MOD1);
    if (p1 < 0.0) p1 += MRG32K3A_MOD1;
    state->s1[0] = state->s1[1];
    state->s1[1] = state->s1[2];
    state->s1[2] = (unsigned int)p1;
    p2 = (MRG32K3A_A21 * state->s2[2]) - (MRG32K3A_A23N * state->s2[0]);
    p2 = curand_MRGmod(p2, MRG32K3A_MOD2);
    if (p2 < 0) p2 += MRG32K3A_MOD2;
    state->s2[0] = state->s2[1];
    state->s2[1] = state->s2[2];
    state->s2[2] = (unsigned int)p2;
    /* r == 0 is mapped to m1 so the result stays in (0, m1]. */
    r = p1 - p2;
    if (r <= 0) r += MRG32K3A_MOD1;
    return r;
}
+
+
+/**
+ * \brief Return 32-bits of pseudorandomness from an MRG32k3a generator.
+ *
+ * Return 32-bits of pseudorandomness from the MRG32k3a generator in \p state,
+ * increment position of generator by one.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return 32-bits of pseudorandomness as an unsigned int, all bits valid to use.
+ */
QUALIFIERS unsigned int curand(curandStateMRG32k3a_t *state)
{
    /* Scale the raw MRG32k3a draw up to 32-bit integer range and
       truncate to produce the unsigned result. */
    const double scaled = (double)curand_MRG32k3a(state) * (double)MRG32K3A_BITS_NORM;
    return (unsigned int)scaled;
}
+
+
+
+/**
+ * \brief Update MRG32k3a state to skip \p n elements.
+ *
+ * Update the MRG32k3a state in \p state to skip ahead \p n elements.
+ *
+ * All values of \p n are valid. Large values require more computation and so
+ * will take more time to complete.
+ *
+ * \param n - Number of elements to skip
+ * \param state - Pointer to state to update
+ */
QUALIFIERS void skipahead(unsigned long long n, curandStateMRG32k3a_t *state)
{
    /* t receives the n-th power (mod m) of the one-step transition
       matrix; multiplying each state vector by it advances n elements. */
    unsigned int t[3][3];
NV_IF_ELSE_TARGET(NV_IS_DEVICE,
    /* Device build: device-resident precomputed matrix tables. */
    curand_MRGmatPow3x3( mrg32k3aM1, t, MRG32K3A_MOD1, n);
    curand_MRGmatVecMul3x3( t, state->s1, MRG32K3A_MOD1);
    curand_MRGmatPow3x3(mrg32k3aM2, t, MRG32K3A_MOD2, n);
    curand_MRGmatVecMul3x3( t, state->s2, MRG32K3A_MOD2);
,
    /* Host build: host-side copies of the same tables. */
    curand_MRGmatPow3x3( mrg32k3aM1Host, t, MRG32K3A_MOD1, n);
    curand_MRGmatVecMul3x3( t, state->s1, MRG32K3A_MOD1);
    curand_MRGmatPow3x3(mrg32k3aM2Host, t, MRG32K3A_MOD2, n);
    curand_MRGmatVecMul3x3( t, state->s2, MRG32K3A_MOD2);
)
}
+
+/**
+ * \brief Update MRG32k3a state to skip ahead \p n subsequences.
+ *
+ * Update the MRG32k3a state in \p state to skip ahead \p n subsequences. Each
+ * subsequence is 2^76 elements long, so this means the function will skip ahead
+ * 2^76 * n elements.
+ *
+ * Valid values of \p n are 0 to 2^51. Note \p n will be masked to 51 bits
+ *
+ * \param n - Number of subsequences to skip
+ * \param state - Pointer to state to update
+ */
QUALIFIERS void skipahead_subsequence(unsigned long long n, curandStateMRG32k3a_t *state)
{
    /* t receives the n-th power (mod m) of the per-subsequence jump
       matrix; applying it to each state vector skips n subsequences. */
    unsigned int t[3][3];
NV_IF_ELSE_TARGET(NV_IS_DEVICE,
    /* Device build: device-resident subsequence jump tables. */
    curand_MRGmatPow3x3( mrg32k3aM1SubSeq, t, MRG32K3A_MOD1, n);
    curand_MRGmatVecMul3x3( t, state->s1, MRG32K3A_MOD1);
    curand_MRGmatPow3x3( mrg32k3aM2SubSeq, t, MRG32K3A_MOD2, n);
    curand_MRGmatVecMul3x3( t, state->s2, MRG32K3A_MOD2);
,
    /* Host build: host-side copies of the same tables. */
    curand_MRGmatPow3x3( mrg32k3aM1SubSeqHost, t, MRG32K3A_MOD1, n);
    curand_MRGmatVecMul3x3( t, state->s1, MRG32K3A_MOD1);
    curand_MRGmatPow3x3( mrg32k3aM2SubSeqHost, t, MRG32K3A_MOD2, n);
    curand_MRGmatVecMul3x3( t, state->s2, MRG32K3A_MOD2);
)
}
+
+/**
+ * \brief Update MRG32k3a state to skip ahead \p n sequences.
+ *
+ * Update the MRG32k3a state in \p state to skip ahead \p n sequences. Each
+ * sequence is 2^127 elements long, so this means the function will skip ahead
+ * 2^127 * n elements.
+ *
+ * All values of \p n are valid. Large values require more computation and so
+ * will take more time to complete.
+ *
+ * \param n - Number of sequences to skip
+ * \param state - Pointer to state to update
+ */
QUALIFIERS void skipahead_sequence(unsigned long long n, curandStateMRG32k3a_t *state)
{
    /* t receives the n-th power (mod m) of the per-sequence jump
       matrix; applying it to each state vector skips n sequences. */
    unsigned int t[3][3];
NV_IF_ELSE_TARGET(NV_IS_DEVICE,
    /* Device build: device-resident sequence jump tables. */
    curand_MRGmatPow3x3( mrg32k3aM1Seq, t, MRG32K3A_MOD1, n);
    curand_MRGmatVecMul3x3( t, state->s1, MRG32K3A_MOD1);
    curand_MRGmatPow3x3( mrg32k3aM2Seq, t, MRG32K3A_MOD2, n);
    curand_MRGmatVecMul3x3( t, state->s2, MRG32K3A_MOD2);
,
    /* Host build: host-side copies of the same tables. */
    curand_MRGmatPow3x3( mrg32k3aM1SeqHost, t, MRG32K3A_MOD1, n);
    curand_MRGmatVecMul3x3( t, state->s1, MRG32K3A_MOD1);
    curand_MRGmatPow3x3( mrg32k3aM2SeqHost, t, MRG32K3A_MOD2, n);
    curand_MRGmatVecMul3x3( t, state->s2, MRG32K3A_MOD2);
)
}
+
+
+/**
+ * \brief Initialize MRG32k3a state.
+ *
+ * Initialize MRG32k3a state in \p state with the given \p seed, \p subsequence,
+ * and \p offset.
+ *
+ * All input values of \p seed, \p subsequence, and \p offset are legal.
+ * \p subsequence will be truncated to 51 bits to avoid running into the next sequence
+ *
+ * A value of 0 for \p seed sets the state to the values of the original
+ * published version of the \p MRG32k3a algorithm.
+ *
+ * \param seed - Arbitrary bits to use as a seed
+ * \param subsequence - Subsequence to start at
+ * \param offset - Absolute offset into sequence
+ * \param state - Pointer to state to initialize
+ */
QUALIFIERS void curand_init(unsigned long long seed,
                            unsigned long long subsequence,
                            unsigned long long offset,
                            curandStateMRG32k3a_t *state)
{
    /* Start from the canonical published MRG32k3a state (all 12345). */
    for (int j = 0; j < 3; j++) {
        state->s1[j] = 12345u;
        state->s2[j] = 12345u;
    }
    /* A non-zero seed perturbs every state word by modular multiplication
       with masks derived from the two halves of the seed. */
    if (seed != 0ull) {
        const unsigned int lo = ((unsigned int)seed) ^ 0x55555555UL;
        const unsigned int hi = (unsigned int)((seed >> 32) ^ 0xAAAAAAAAUL);
        state->s1[0] = (unsigned int)curand_MRGmodMul(lo, state->s1[0], MRG32K3A_MOD1);
        state->s1[1] = (unsigned int)curand_MRGmodMul(hi, state->s1[1], MRG32K3A_MOD1);
        state->s1[2] = (unsigned int)curand_MRGmodMul(lo, state->s1[2], MRG32K3A_MOD1);
        state->s2[0] = (unsigned int)curand_MRGmodMul(hi, state->s2[0], MRG32K3A_MOD2);
        state->s2[1] = (unsigned int)curand_MRGmodMul(lo, state->s2[1], MRG32K3A_MOD2);
        state->s2[2] = (unsigned int)curand_MRGmodMul(hi, state->s2[2], MRG32K3A_MOD2);
    }
    /* Position the generator, then clear the Box-Muller caches. */
    skipahead_subsequence( subsequence, state );
    skipahead( offset, state );
    state->boxmuller_flag = 0;
    state->boxmuller_flag_double = 0;
    state->boxmuller_extra = 0.f;
    state->boxmuller_extra_double = 0.;
}
+
+/**
+ * \brief Update Sobol32 state to skip \p n elements.
+ *
+ * Update the Sobol32 state in \p state to skip ahead \p n elements.
+ *
+ * All values of \p n are valid.
+ *
+ * \param n - Number of elements to skip
+ * \param state - Pointer to state to update
+ */
/* NOTE(review): the template header and the enable_if/is_same template
 * arguments were stripped in this copy of the header (angle-bracket
 * contents lost); restored so the overload is enabled for Sobol32 and
 * scrambled Sobol32 state pointers, matching the doc comment above. */
template <typename T>
QUALIFIERS
typename CURAND_STD::enable_if<CURAND_STD::is_same<T, curandStateSobol32_t *>::value ||
                               CURAND_STD::is_same<T, curandStateScrambledSobol32_t *>::value>::type
skipahead(unsigned int n, T state)
{
    /* Recompute x for position i + n from scratch: start from the
       scramble constant c and XOR in the direction vector for every set
       bit of the Gray code of the new index. */
    state->x = state->c;
    state->i += n;
    const unsigned int i_gray = state->i ^ (state->i >> 1);
    for (unsigned int k = 0; k < 32; k++) {
        /* 1U avoids shifting into the sign bit at k == 31. */
        if (i_gray & (1U << k)) {
            state->x ^= state->direction_vectors[k];
        }
    }
}
+
+/**
+ * \brief Update Sobol64 state to skip \p n elements.
+ *
+ * Update the Sobol64 state in \p state to skip ahead \p n elements.
+ *
+ * All values of \p n are valid.
+ *
+ * \param n - Number of elements to skip
+ * \param state - Pointer to state to update
+ */
/* NOTE(review): the template header and the enable_if/is_same template
 * arguments were stripped in this copy of the header (angle-bracket
 * contents lost); restored so the overload is enabled for Sobol64 and
 * scrambled Sobol64 state pointers, matching the doc comment above. */
template <typename T>
QUALIFIERS
typename CURAND_STD::enable_if<CURAND_STD::is_same<T, curandStateSobol64_t *>::value ||
                               CURAND_STD::is_same<T, curandStateScrambledSobol64_t *>::value>::type
skipahead(unsigned long long n, T state)
{
    /* Recompute x for position i + n from scratch: start from the
       scramble constant c and XOR in the direction vector for every set
       bit of the Gray code of the new index. */
    state->x = state->c;
    state->i += n;
    const unsigned long long i_gray = state->i ^ (state->i >> 1);
    for (unsigned int k = 0; k < 64; k++) {
        if (i_gray & (1ULL << k)) {
            state->x ^= state->direction_vectors[k];
        }
    }
}
+
+/**
+ * \brief Initialize Sobol32 state.
+ *
+ * Initialize Sobol32 state in \p state with the given \p direction \p vectors and
+ * \p offset.
+ *
+ * The direction vector is a device pointer to an array of 32 unsigned ints.
+ * All input values of \p offset are legal.
+ *
+ * \param direction_vectors - Pointer to array of 32 unsigned ints representing the
+ * direction vectors for the desired dimension
+ * \param offset - Absolute offset into sequence
+ * \param state - Pointer to state to initialize
+ */
QUALIFIERS void curand_init(curandDirectionVectors32_t direction_vectors,
                            unsigned int offset,
                            curandStateSobol32_t *state)
{
    /* Unscrambled generator: index, scramble constant and x all start
       at zero; copy the 32 direction vectors and jump to the offset. */
    state->i = 0;
    state->c = 0;
    state->x = 0;
    for (int k = 0; k < 32; ++k) {
        state->direction_vectors[k] = direction_vectors[k];
    }
    skipahead(offset, state);
}
+/**
+ * \brief Initialize Scrambled Sobol32 state.
+ *
+ * Initialize Sobol32 state in \p state with the given \p direction \p vectors and
+ * \p offset.
+ *
+ * The direction vector is a device pointer to an array of 32 unsigned ints.
+ * All input values of \p offset are legal.
+ *
+ * \param direction_vectors - Pointer to array of 32 unsigned ints representing the
+ direction vectors for the desired dimension
+ * \param scramble_c Scramble constant
+ * \param offset - Absolute offset into sequence
+ * \param state - Pointer to state to initialize
+ */
QUALIFIERS void curand_init(curandDirectionVectors32_t direction_vectors,
                            unsigned int scramble_c,
                            unsigned int offset,
                            curandStateScrambledSobol32_t *state)
{
    /* Scrambled generator: x starts at the scramble constant; copy the
       32 direction vectors and jump to the requested offset. */
    state->i = 0;
    state->c = scramble_c;
    for (int k = 0; k < 32; ++k) {
        state->direction_vectors[k] = direction_vectors[k];
    }
    state->x = state->c;
    skipahead(offset, state);
}
+
/* Return the bit index of the lowest 0 bit of x (31 if x is all ones).
 * Used by the Sobol generators: stepping the Gray code from i to i+1
 * flips exactly this bit of i. */
QUALIFIERS int __curand_find_trailing_zero(unsigned int x)
{
NV_IF_ELSE_TARGET(NV_IS_DEVICE,
    /* __ffs(~x) gives the 1-based position of the lowest zero bit of x
       (0 when x is all ones). */
    int y = __ffs(~x);
    if(y)
        return y - 1;
    return 31;
,
    /* Portable host version: count trailing 1 bits by shifting. */
    int i = 1;
    while(x & 1) {
        i++;
        x >>= 1;
    }
    i = i - 1;
    return i == 32 ? 31 : i;
)
}
+
/* 64-bit variant: bit index of the lowest 0 bit of x (63 if all ones). */
QUALIFIERS int __curand_find_trailing_zero(unsigned long long x)
{
NV_IF_ELSE_TARGET(NV_IS_DEVICE,
    /* __ffsll(~x) gives the 1-based position of the lowest zero bit of x
       (0 when x is all ones). */
    int y = __ffsll(~x);
    if(y)
        return y - 1;
    return 63;
,
    /* Portable host version: count trailing 1 bits by shifting. */
    int i = 1;
    while(x & 1) {
        i++;
        x >>= 1;
    }
    i = i - 1;
    return i == 64 ? 63 : i;
)
}
+
+/**
+ * \brief Initialize Sobol64 state.
+ *
+ * Initialize Sobol64 state in \p state with the given \p direction \p vectors and
+ * \p offset.
+ *
+ * The direction vector is a device pointer to an array of 64 unsigned long longs.
+ * All input values of \p offset are legal.
+ *
+ * \param direction_vectors - Pointer to array of 64 unsigned long longs representing the
+ direction vectors for the desired dimension
+ * \param offset - Absolute offset into sequence
+ * \param state - Pointer to state to initialize
+ */
QUALIFIERS void curand_init(curandDirectionVectors64_t direction_vectors,
                            unsigned long long offset,
                            curandStateSobol64_t *state)
{
    /* Unscrambled generator: index, scramble constant and x all start
       at zero; copy the 64 direction vectors and jump to the offset. */
    state->i = 0;
    state->c = 0;
    state->x = 0;
    for (int k = 0; k < 64; ++k) {
        state->direction_vectors[k] = direction_vectors[k];
    }
    skipahead(offset, state);
}
+
+/**
+ * \brief Initialize Scrambled Sobol64 state.
+ *
+ * Initialize Sobol64 state in \p state with the given \p direction \p vectors and
+ * \p offset.
+ *
+ * The direction vector is a device pointer to an array of 64 unsigned long longs.
+ * All input values of \p offset are legal.
+ *
+ * \param direction_vectors - Pointer to array of 64 unsigned long longs representing the
+ direction vectors for the desired dimension
+ * \param scramble_c Scramble constant
+ * \param offset - Absolute offset into sequence
+ * \param state - Pointer to state to initialize
+ */
QUALIFIERS void curand_init(curandDirectionVectors64_t direction_vectors,
                            unsigned long long scramble_c,
                            unsigned long long offset,
                            curandStateScrambledSobol64_t *state)
{
    /* Scrambled generator: x starts at the scramble constant; copy the
       64 direction vectors and jump to the requested offset. */
    state->i = 0;
    state->c = scramble_c;
    for (int k = 0; k < 64; ++k) {
        state->direction_vectors[k] = direction_vectors[k];
    }
    state->x = state->c;
    skipahead(offset, state);
}
+
+/**
+ * \brief Return 32-bits of quasirandomness from a Sobol32 generator.
+ *
+ * Return 32-bits of quasirandomness from the Sobol32 generator in \p state,
+ * increment position of generator by one.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return 32-bits of quasirandomness as an unsigned int, all bits valid to use.
+ */
+
QUALIFIERS unsigned int curand(curandStateSobol32_t * state)
{
    /* Gray-code update: moving from element i to i+1 flips exactly one
       bit of the Gray code, namely the lowest zero bit of i, so a
       single XOR with that direction vector advances the sequence. */
    const unsigned int current = state->x;
    const int flip = __curand_find_trailing_zero(state->i);
    state->x ^= state->direction_vectors[flip];
    state->i++;
    return current;
}
+
+/**
+ * \brief Return 32-bits of quasirandomness from a scrambled Sobol32 generator.
+ *
+ * Return 32-bits of quasirandomness from the scrambled Sobol32 generator in \p state,
+ * increment position of generator by one.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return 32-bits of quasirandomness as an unsigned int, all bits valid to use.
+ */
+
QUALIFIERS unsigned int curand(curandStateScrambledSobol32_t * state)
{
    /* Gray-code update: element i -> i+1 flips the lowest zero bit of
       i, so one XOR with that direction vector advances the sequence. */
    const unsigned int current = state->x;
    const int flip = __curand_find_trailing_zero(state->i);
    state->x ^= state->direction_vectors[flip];
    state->i++;
    return current;
}
+
+/**
+ * \brief Return 64-bits of quasirandomness from a Sobol64 generator.
+ *
+ * Return 64-bits of quasirandomness from the Sobol64 generator in \p state,
+ * increment position of generator by one.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return 64-bits of quasirandomness as an unsigned long long, all bits valid to use.
+ */
+
QUALIFIERS unsigned long long curand(curandStateSobol64_t * state)
{
    /* Gray-code update: element i -> i+1 flips the lowest zero bit of
       i, so one XOR with that direction vector advances the sequence. */
    const unsigned long long current = state->x;
    const int flip = __curand_find_trailing_zero(state->i);
    state->x ^= state->direction_vectors[flip];
    state->i++;
    return current;
}
+
+/**
+ * \brief Return 64-bits of quasirandomness from a scrambled Sobol64 generator.
+ *
+ * Return 64-bits of quasirandomness from the scrambled Sobol64 generator in \p state,
+ * increment position of generator by one.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return 64-bits of quasirandomness as an unsigned long long, all bits valid to use.
+ */
+
QUALIFIERS unsigned long long curand(curandStateScrambledSobol64_t * state)
{
    /* Gray-code update: element i -> i+1 flips the lowest zero bit of
       i, so one XOR with that direction vector advances the sequence. */
    const unsigned long long current = state->x;
    const int flip = __curand_find_trailing_zero(state->i);
    state->x ^= state->direction_vectors[flip];
    state->i++;
    return current;
}
+
+#include "curand_uniform.h"
+#include "curand_normal.h"
+#include "curand_lognormal.h"
+#include "curand_poisson.h"
+#include "curand_discrete2.h"
+
/* Device-side lookup of a precomputed XORWOW skip-ahead matrix: row n of
 * precalc_xorwow_offset_matrix when n == 2, otherwise row n of
 * precalc_xorwow_matrix.  NOTE(review): the n == 0 branch returns the
 * same table as the final fallback; the explicit branching may exist to
 * keep both tables referenced -- confirm intent before simplifying. */
__device__ static inline unsigned int *__get_precalculated_matrix(int n)
{
    if(n == 0) {
        return precalc_xorwow_matrix[n];
    }
    if(n == 2) {
        return precalc_xorwow_offset_matrix[n];
    }
    return precalc_xorwow_matrix[n];
}
+
+#ifndef __CUDACC_RTC__
/* Host-side counterpart of __get_precalculated_matrix: row n of
 * precalc_xorwow_offset_matrix_host when n == 3, otherwise row n of
 * precalc_xorwow_matrix_host.  NOTE(review): the n == 1 branch duplicates
 * the fallback -- confirm intent before simplifying. */
__host__ static inline unsigned int *__get_precalculated_matrix_host(int n)
{
    if(n == 1) {
        return precalc_xorwow_matrix_host[n];
    }
    if(n == 3) {
        return precalc_xorwow_offset_matrix_host[n];
    }
    return precalc_xorwow_matrix_host[n];
}
+#endif // #ifndef __CUDACC_RTC__
+
/* Device-side dispatch into the six MRG32k3a skip-ahead matrix tables
 * (element / subsequence / sequence jumps, for moduli m1 and m2).  Only
 * the listed even values of n select the alternate tables; any other n
 * falls through to mrg32k3aM1. */
__device__ static inline unsigned int *__get_mrg32k3a_matrix(int n)
{
    if(n == 0) {
        return mrg32k3aM1[n][0];
    }
    if(n == 2) {
        return mrg32k3aM2[n][0];
    }
    if(n == 4) {
        return mrg32k3aM1SubSeq[n][0];
    }
    if(n == 6) {
        return mrg32k3aM2SubSeq[n][0];
    }
    if(n == 8) {
        return mrg32k3aM1Seq[n][0];
    }
    if(n == 10) {
        return mrg32k3aM2Seq[n][0];
    }
    return mrg32k3aM1[n][0];
}
+
+#ifndef __CUDACC_RTC__
/* Host-side counterpart of __get_mrg32k3a_matrix: dispatches into the
 * host copies of the six MRG32k3a skip-ahead tables using the listed odd
 * values of n; any other n falls through to mrg32k3aM1Host. */
__host__ static inline unsigned int *__get_mrg32k3a_matrix_host(int n)
{
    if(n == 1) {
        return mrg32k3aM1Host[n][0];
    }
    if(n == 3) {
        return mrg32k3aM2Host[n][0];
    }
    if(n == 5) {
        return mrg32k3aM1SubSeqHost[n][0];
    }
    if(n == 7) {
        return mrg32k3aM2SubSeqHost[n][0];
    }
    if(n == 9) {
        return mrg32k3aM1SeqHost[n][0];
    }
    if(n == 11) {
        return mrg32k3aM2SeqHost[n][0];
    }
    return mrg32k3aM1Host[n][0];
}
+
/* Host accessor exposing the __cr_lgamma_table array. */
__host__ static inline double *__get__cr_lgamma_table_host(void) {
    return __cr_lgamma_table;
}
+#endif // #ifndef __CUDACC_RTC__
+
+/** @} */
+
+#endif // !defined(CURAND_KERNEL_H_)
diff --git a/videollama2/lib/python3.10/site-packages/nvidia/curand/include/curand_uniform.h b/videollama2/lib/python3.10/site-packages/nvidia/curand/include/curand_uniform.h
new file mode 100644
index 0000000000000000000000000000000000000000..7a4af8afa328c186d9ea33a8c8226e19aba4793e
--- /dev/null
+++ b/videollama2/lib/python3.10/site-packages/nvidia/curand/include/curand_uniform.h
@@ -0,0 +1,498 @@
+
+ /* Copyright 2010-2018 NVIDIA Corporation. All rights reserved.
+ *
+ * NOTICE TO LICENSEE:
+ *
+ * The source code and/or documentation ("Licensed Deliverables") are
+ * subject to NVIDIA intellectual property rights under U.S. and
+ * international Copyright laws.
+ *
+ * The Licensed Deliverables contained herein are PROPRIETARY and
+ * CONFIDENTIAL to NVIDIA and are being provided under the terms and
+ * conditions of a form of NVIDIA software license agreement by and
+ * between NVIDIA and Licensee ("License Agreement") or electronically
+ * accepted by Licensee. Notwithstanding any terms or conditions to
+ * the contrary in the License Agreement, reproduction or disclosure
+ * of the Licensed Deliverables to any third party without the express
+ * written consent of NVIDIA is prohibited.
+ *
+ * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
+ * LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE
+ * SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE. THEY ARE
+ * PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
+ * NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED
+ * DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY,
+ * NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
+ * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
+ * LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY
+ * SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY
+ * DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+ * WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ * ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
+ * OF THESE LICENSED DELIVERABLES.
+ *
+ * U.S. Government End Users. These Licensed Deliverables are a
+ * "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT
+ * 1995), consisting of "commercial computer software" and "commercial
+ * computer software documentation" as such terms are used in 48
+ * C.F.R. 12.212 (SEPT 1995) and are provided to the U.S. Government
+ * only as a commercial end item. Consistent with 48 C.F.R.12.212 and
+ * 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all
+ * U.S. Government End Users acquire the Licensed Deliverables with
+ * only those rights set forth herein.
+ *
+ * Any use of the Licensed Deliverables in individual and commercial
+ * software must include, in the user documentation and internal
+ * comments to the code, the above Disclaimer and U.S. Government End
+ * Users Notice.
+ */
+
+
+#if !defined(CURAND_UNIFORM_H_)
+#define CURAND_UNIFORM_H_
+
+/**
+ * \defgroup DEVICE Device API
+ *
+ * @{
+ */
+
+#ifndef __CUDACC_RTC__
+#include
+#endif // __CUDACC_RTC__
+
+#include "curand_mrg32k3a.h"
+#include "curand_mtgp32_kernel.h"
+#include "curand_philox4x32_x.h"
+
+
QUALIFIERS float _curand_uniform(unsigned int x)
{
    /* Affine map of a 32-bit word: scale by CURAND_2POW32_INV and add
       half a step so x == 0 cannot yield 0.0f (range is (0, 1]). */
    const float half_step = CURAND_2POW32_INV / 2.0f;
    return x * CURAND_2POW32_INV + half_step;
}
+
QUALIFIERS float4 _curand_uniform4(uint4 x)
{
    /* Apply the scalar (0, 1] mapping to each of the four lanes. */
    const float half_step = CURAND_2POW32_INV / 2.0f;
    float4 out;
    out.x = x.x * CURAND_2POW32_INV + half_step;
    out.y = x.y * CURAND_2POW32_INV + half_step;
    out.z = x.z * CURAND_2POW32_INV + half_step;
    out.w = x.w * CURAND_2POW32_INV + half_step;
    return out;
}
+
QUALIFIERS float _curand_uniform(unsigned long long x)
{
    /* Only the top 32 bits of the draw feed the float mapping. */
    const unsigned int top = (unsigned int)(x >> 32);
    return top * CURAND_2POW32_INV + (CURAND_2POW32_INV/2.0f);
}
+
QUALIFIERS double _curand_uniform_double(unsigned int x)
{
    /* Maps x to (x + 1) * CURAND_2POW32_INV_DOUBLE: a FULL step is added
     * here, unlike the half step used in the float paths.  This keeps the
     * output in the documented (0, 1] range with the maximum input mapping
     * to exactly 1.0 (assuming the constant is 2^-32 -- TODO confirm), so
     * do not "fix" this to a half step.  Only 32 random bits feed the
     * 53-bit mantissa. */
    return x * CURAND_2POW32_INV_DOUBLE + CURAND_2POW32_INV_DOUBLE;
}
+
QUALIFIERS double _curand_uniform_double(unsigned long long x)
{
    /* Keep the top 53 bits so the whole double mantissa is random;
       the half-step offset excludes 0.0 from the output range. */
    const unsigned long long top53 = x >> 11;
    return top53 * CURAND_2POW53_INV_DOUBLE + (CURAND_2POW53_INV_DOUBLE/2.0);
}
+
QUALIFIERS double _curand_uniform_double_hq(unsigned int x, unsigned int y)
{
    /* Build a 53-bit value from two 32-bit draws: y is shifted to bits
     * 21..52 and x occupies bits 0..31; the overlapping bits 21..31 are
     * combined with XOR (intentional in the original design -- note the
     * operator is ^ rather than |).  The result is then scaled by
     * CURAND_2POW53_INV_DOUBLE with a half-step offset so 0.0 is
     * excluded from the output. */
    unsigned long long z = (unsigned long long)x ^
        ((unsigned long long)y << (53 - 32));
    return z * CURAND_2POW53_INV_DOUBLE + (CURAND_2POW53_INV_DOUBLE/2.0);
}
+
QUALIFIERS float curand_uniform(curandStateTest_t *state)
{
    /* Draw one raw 32-bit sample and map it into (0, 1]. */
    const unsigned int raw = curand(state);
    return _curand_uniform(raw);
}
+
QUALIFIERS double curand_uniform_double(curandStateTest_t *state)
{
    /* Draw one raw sample and map it through the double conversion. */
    const unsigned int raw = curand(state);
    return _curand_uniform_double(raw);
}
+
+/**
+ * \brief Return a uniformly distributed float from an XORWOW generator.
+ *
+ * Return a uniformly distributed float between \p 0.0f and \p 1.0f
+ * from the XORWOW generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation may use any number of calls to \p curand() to
+ * get enough random bits to create the return value. The current
+ * implementation uses one call.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed float between \p 0.0f and \p 1.0f
+ */
QUALIFIERS float curand_uniform(curandStateXORWOW_t *state)
{
    /* One XORWOW draw mapped into (0, 1]. */
    const unsigned int raw = curand(state);
    return _curand_uniform(raw);
}
+
+/**
+ * \brief Return a uniformly distributed double from an XORWOW generator.
+ *
+ * Return a uniformly distributed double between \p 0.0 and \p 1.0
+ * from the XORWOW generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0 but includes \p 1.0. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation may use any number of calls to \p curand() to
+ * get enough random bits to create the return value. The current
+ * implementation uses exactly two calls.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed double between \p 0.0 and \p 1.0
+ */
QUALIFIERS double curand_uniform_double(curandStateXORWOW_t *state)
{
    /* Two 32-bit draws feed the 53-bit high-quality double mapping;
       draw order matters for reproducibility so it is preserved. */
    const unsigned int first = curand(state);
    const unsigned int second = curand(state);
    return _curand_uniform_double_hq(first, second);
}
+/**
+ * \brief Return a uniformly distributed float from an MRG32k3a generator.
+ *
+ * Return a uniformly distributed float between \p 0.0f and \p 1.0f
+ * from the MRG32k3a generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation returns up to 23 bits of mantissa, with the minimum
+ * return value \f$ 2^{-32} \f$
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed float between \p 0.0f and \p 1.0f
+ */
QUALIFIERS float curand_uniform(curandStateMRG32k3a_t *state)
{
    /* Rescale the raw MRG32k3a draw in double and narrow to float. */
    const double u = curand_MRG32k3a(state)*MRG32K3A_NORM;
    return (float)u;
}
+
+/**
+ * \brief Return a uniformly distributed double from an MRG32k3a generator.
+ *
+ * Return a uniformly distributed double between \p 0.0 and \p 1.0
+ * from the MRG32k3a generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0 but includes \p 1.0. Denormalized floating
+ * point outputs are never returned.
+ *
+ * Note the implementation returns at most 32 random bits of mantissa as
+ * outlined in the seminal paper by L'Ecuyer.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed double between \p 0.0 and \p 1.0
+ */
QUALIFIERS double curand_uniform_double(curandStateMRG32k3a_t *state)
{
    /* Direct rescale of the raw MRG32k3a draw into the unit interval. */
    const double raw = curand_MRG32k3a(state);
    return raw*MRG32K3A_NORM;
}
+
+
+
+/**
+ * \brief Return a uniformly distributed tuple of 2 doubles from an Philox4_32_10 generator.
+ *
+ * Return a uniformly distributed 2 doubles (double4) between \p 0.0 and \p 1.0
+ * from the Philox4_32_10 generator in \p state, increment position of generator by 4.
+ * Output range excludes \p 0.0 but includes \p 1.0. Denormalized floating
+ * point outputs are never returned.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return 2 uniformly distributed doubles between \p 0.0 and \p 1.0
+ */
+
QUALIFIERS double2 curand_uniform2_double(curandStatePhilox4_32_10_t *state)
{
    /* One 4-word Philox draw yields two high-quality doubles. */
    const uint4 w = curand4(state);
    double2 out;
    out.x = _curand_uniform_double_hq(w.x, w.y);
    out.y = _curand_uniform_double_hq(w.z, w.w);
    return out;
}
+
+
+// not a part of API
// not a part of API
QUALIFIERS double4 curand_uniform4_double(curandStatePhilox4_32_10_t *state)
{
    /* Two 4-word Philox draws supply the four 53-bit mantissas. */
    const uint4 first = curand4(state);
    const uint4 second = curand4(state);
    double4 out;
    out.x = _curand_uniform_double_hq(first.x, first.y);
    out.y = _curand_uniform_double_hq(first.z, first.w);
    out.z = _curand_uniform_double_hq(second.x, second.y);
    out.w = _curand_uniform_double_hq(second.z, second.w);
    return out;
}
+
+/**
+ * \brief Return a uniformly distributed float from a Philox4_32_10 generator.
+ *
+ * Return a uniformly distributed float between \p 0.0f and \p 1.0f
+ * from the Philox4_32_10 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed float between \p 0.0 and \p 1.0
+ *
+ */
QUALIFIERS float curand_uniform(curandStatePhilox4_32_10_t *state)
{
    /* One Philox draw mapped into (0, 1]. */
    const unsigned int raw = curand(state);
    return _curand_uniform(raw);
}
+
+/**
+ * \brief Return a uniformly distributed tuple of 4 floats from a Philox4_32_10 generator.
+ *
+ * Return a uniformly distributed 4 floats between \p 0.0f and \p 1.0f
+ * from the Philox4_32_10 generator in \p state, increment position of generator by 4.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed float between \p 0.0 and \p 1.0
+ *
+ */
QUALIFIERS float4 curand_uniform4(curandStatePhilox4_32_10_t *state)
{
    /* One 4-word Philox draw mapped lane-wise into (0, 1]. */
    const uint4 raw = curand4(state);
    return _curand_uniform4(raw);
}
+
+/**
+ * \brief Return a uniformly distributed float from a MTGP32 generator.
+ *
+ * Return a uniformly distributed float between \p 0.0f and \p 1.0f
+ * from the MTGP32 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed float between \p 0.0f and \p 1.0f
+ */
QUALIFIERS float curand_uniform(curandStateMtgp32_t *state)
{
    /* One MTGP32 draw mapped into (0, 1]. */
    const unsigned int raw = curand(state);
    return _curand_uniform(raw);
}
+/**
+ * \brief Return a uniformly distributed double from a MTGP32 generator.
+ *
+ * Return a uniformly distributed double between \p 0.0f and \p 1.0f
+ * from the MTGP32 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * Note that the implementation uses only 32 random bits to generate a single double
+ * precision value.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed double between \p 0.0f and \p 1.0f
+ */
QUALIFIERS double curand_uniform_double(curandStateMtgp32_t *state)
{
    /* Single 32-bit MTGP32 draw through the double conversion. */
    const unsigned int raw = curand(state);
    return _curand_uniform_double(raw);
}
+
+/**
+ * \brief Return a uniformly distributed double from a Philox4_32_10 generator.
+ *
+ * Return a uniformly distributed double between \p 0.0f and \p 1.0f
+ * from the Philox4_32_10 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * Note that the implementation uses only 32 random bits to generate a single double
+ * precision value.
+ *
+ * \p curand_uniform2_double() is recommended for higher quality uniformly distributed
+ * double precision values.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed double between \p 0.0f and \p 1.0f
+ */
+
QUALIFIERS double curand_uniform_double(curandStatePhilox4_32_10_t *state)
{
    /* Single 32-bit Philox draw through the double conversion. */
    const unsigned int raw = curand(state);
    return _curand_uniform_double(raw);
}
+
+
+/**
+ * \brief Return a uniformly distributed float from a Sobol32 generator.
+ *
+ * Return a uniformly distributed float between \p 0.0f and \p 1.0f
+ * from the Sobol32 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation is guaranteed to use a single call to \p curand().
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed float between \p 0.0f and \p 1.0f
+ */
QUALIFIERS float curand_uniform(curandStateSobol32_t *state)
{
    /* Exactly one Sobol32 draw mapped into (0, 1]. */
    const unsigned int raw = curand(state);
    return _curand_uniform(raw);
}
+
+/**
+ * \brief Return a uniformly distributed double from a Sobol32 generator.
+ *
+ * Return a uniformly distributed double between \p 0.0 and \p 1.0
+ * from the Sobol32 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0 but includes \p 1.0. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation is guaranteed to use a single call to \p curand()
+ * to preserve the quasirandom properties of the sequence.
+ *
+ * Note that the implementation uses only 32 random bits to generate a single double
+ * precision value.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed double between \p 0.0 and \p 1.0
+ */
QUALIFIERS double curand_uniform_double(curandStateSobol32_t *state)
{
    /* Exactly one Sobol32 draw (preserves quasirandom properties)
       through the double conversion. */
    const unsigned int raw = curand(state);
    return _curand_uniform_double(raw);
}
+/**
+ * \brief Return a uniformly distributed float from a scrambled Sobol32 generator.
+ *
+ * Return a uniformly distributed float between \p 0.0f and \p 1.0f
+ * from the scrambled Sobol32 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation is guaranteed to use a single call to \p curand().
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed float between \p 0.0f and \p 1.0f
+ */
QUALIFIERS float curand_uniform(curandStateScrambledSobol32_t *state)
{
    /* Exactly one scrambled Sobol32 draw mapped into (0, 1]. */
    const unsigned int raw = curand(state);
    return _curand_uniform(raw);
}
+
+/**
+ * \brief Return a uniformly distributed double from a scrambled Sobol32 generator.
+ *
+ * Return a uniformly distributed double between \p 0.0 and \p 1.0
+ * from the scrambled Sobol32 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0 but includes \p 1.0. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation is guaranteed to use a single call to \p curand()
+ * to preserve the quasirandom properties of the sequence.
+ *
+ * Note that the implementation uses only 32 random bits to generate a single double
+ * precision value.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed double between \p 0.0 and \p 1.0
+ */
QUALIFIERS double curand_uniform_double(curandStateScrambledSobol32_t *state)
{
    /* Exactly one scrambled Sobol32 draw (preserves quasirandom
       properties) through the double conversion. */
    const unsigned int raw = curand(state);
    return _curand_uniform_double(raw);
}
+/**
+ * \brief Return a uniformly distributed float from a Sobol64 generator.
+ *
+ * Return a uniformly distributed float between \p 0.0f and \p 1.0f
+ * from the Sobol64 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation is guaranteed to use a single call to \p curand().
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed float between \p 0.0f and \p 1.0f
+ */
QUALIFIERS float curand_uniform(curandStateSobol64_t *state)
{
    /* Exactly one Sobol64 draw mapped into (0, 1]. */
    const unsigned long long raw = curand(state);
    return _curand_uniform(raw);
}
+
+/**
+ * \brief Return a uniformly distributed double from a Sobol64 generator.
+ *
+ * Return a uniformly distributed double between \p 0.0 and \p 1.0
+ * from the Sobol64 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0 but includes \p 1.0. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation is guaranteed to use a single call to \p curand()
+ * to preserve the quasirandom properties of the sequence.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed double between \p 0.0 and \p 1.0
+ */
QUALIFIERS double curand_uniform_double(curandStateSobol64_t *state)
{
    /* Exactly one Sobol64 draw (preserves quasirandom properties)
       through the 64-bit double conversion. */
    const unsigned long long raw = curand(state);
    return _curand_uniform_double(raw);
}
+/**
+ * \brief Return a uniformly distributed float from a scrambled Sobol64 generator.
+ *
+ * Return a uniformly distributed float between \p 0.0f and \p 1.0f
+ * from the scrambled Sobol64 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0f but includes \p 1.0f. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation is guaranteed to use a single call to \p curand().
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed float between \p 0.0f and \p 1.0f
+ */
QUALIFIERS float curand_uniform(curandStateScrambledSobol64_t *state)
{
    /* Exactly one scrambled Sobol64 draw mapped into (0, 1]. */
    const unsigned long long raw = curand(state);
    return _curand_uniform(raw);
}
+
+/**
+ * \brief Return a uniformly distributed double from a scrambled Sobol64 generator.
+ *
+ * Return a uniformly distributed double between \p 0.0 and \p 1.0
+ * from the scrambled Sobol64 generator in \p state, increment position of generator.
+ * Output range excludes \p 0.0 but includes \p 1.0. Denormalized floating
+ * point outputs are never returned.
+ *
+ * The implementation is guaranteed to use a single call to \p curand()
+ * to preserve the quasirandom properties of the sequence.
+ *
+ * \param state - Pointer to state to update
+ *
+ * \return uniformly distributed double between \p 0.0 and \p 1.0
+ */
QUALIFIERS double curand_uniform_double(curandStateScrambledSobol64_t *state)
{
    /* Exactly one scrambled Sobol64 draw (preserves quasirandom
       properties) through the 64-bit double conversion. */
    const unsigned long long raw = curand(state);
    return _curand_uniform_double(raw);
}
+
+#endif // !defined(CURAND_UNIFORM_H_)
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/common.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..69120160699c24cc86670522f84ec6c7014c20ee
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/common.py
@@ -0,0 +1,563 @@
+"""
+Module consolidating common testing functions for checking plotting.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import numpy as np
+
+from pandas.core.dtypes.api import is_list_like
+
+import pandas as pd
+from pandas import Series
+import pandas._testing as tm
+
+if TYPE_CHECKING:
+ from collections.abc import Sequence
+
+ from matplotlib.axes import Axes
+
+
def _check_legend_labels(axes, labels=None, visible=True):
    """
    Assert that every visible axes carries the expected legend labels.

    Parameters
    ----------
    axes : matplotlib Axes object, or its list-like
    labels : list-like
        Legend labels each axes is expected to show.
    visible : bool
        Expected legend visibility; labels are compared only when True.
    """
    if visible and labels is None:
        raise ValueError("labels must be specified when visible is True")
    for ax in _flatten_visible(axes):
        legend = ax.get_legend()
        if not visible:
            assert legend is None
        else:
            assert legend is not None
            _check_text_labels(legend.get_texts(), labels)
+
+
+def _check_legend_marker(ax, expected_markers=None, visible=True):
+ """
+ Check ax has expected legend markers
+
+ Parameters
+ ----------
+ ax : matplotlib Axes object
+ expected_markers : list-like
+ expected legend markers
+ visible : bool
+ expected legend visibility. labels are checked only when visible is
+ True
+ """
+ if visible and (expected_markers is None):
+ raise ValueError("Markers must be specified when visible is True")
+ if visible:
+ handles, _ = ax.get_legend_handles_labels()
+ markers = [handle.get_marker() for handle in handles]
+ assert markers == expected_markers
+ else:
+ assert ax.get_legend() is None
+
+
def _check_data(xp, rs):
    """
    Assert that two axes draw identical lines (same xy data), then close
    all open figures.

    Parameters
    ----------
    xp : matplotlib Axes object
        Axes holding the expected lines.
    rs : matplotlib Axes object
        Axes holding the lines under test.
    """
    import matplotlib.pyplot as plt

    expected_lines = xp.get_lines()
    result_lines = rs.get_lines()

    assert len(expected_lines) == len(result_lines)
    for expected, result in zip(expected_lines, result_lines):
        tm.assert_almost_equal(expected.get_xydata(), result.get_xydata())

    plt.close("all")
+
+
def _check_visible(collections, visible=True):
    """
    Assert each artist's visibility flag equals ``visible``.

    Parameters
    ----------
    collections : matplotlib Artist or its list-like
        A single artist, or a list/collection of artists.
    visible : bool
        Expected visibility for every artist.
    """
    from matplotlib.collections import Collection

    # a lone artist (not a Collection and not list-like) is wrapped so the
    # loop below handles both cases uniformly
    if not (isinstance(collections, Collection) or is_list_like(collections)):
        collections = [collections]

    for artist in collections:
        assert artist.get_visible() == visible
+
+
def _check_patches_all_filled(axes: Axes | Sequence[Axes], filled: bool = True) -> None:
    """
    Assert every patch on the (visible) axes has the expected fill state.

    Parameters
    ----------
    axes : matplotlib Axes object, or its list-like
    filled : bool
        Expected value of ``patch.fill`` for each patch.
    """
    for ax in _flatten_visible(axes):
        for patch in ax.patches:
            assert patch.fill == filled
+
+
+def _get_colors_mapped(series, colors):
+ unique = series.unique()
+ # unique and colors length can be differed
+ # depending on slice value
+ mapped = dict(zip(unique, colors))
+ return [mapped[v] for v in series.values]
+
+
def _check_colors(collections, linecolors=None, facecolors=None, mapping=None):
    """
    Check each artist has expected line colors and face colors

    Parameters
    ----------
    collections : list-like
        list or collection of target artist
    linecolors : list-like which has the same length as collections
        list of expected line colors
    facecolors : list-like which has the same length as collections
        list of expected face colors
    mapping : Series
        Series used for color grouping key
        used for andrew_curves, parallel_coordinates, radviz test
    """
    from matplotlib import colors
    from matplotlib.collections import (
        Collection,
        LineCollection,
        PolyCollection,
    )
    from matplotlib.lines import Line2D

    # NOTE(review): the class itself is used, not an instance — this relies
    # on ColorConverter.to_rgba being callable on the class (staticmethod in
    # current matplotlib); confirm if the matplotlib pin ever moves.
    conv = colors.ColorConverter
    if linecolors is not None:
        if mapping is not None:
            # expand expected colors so each group value in `mapping`
            # receives its color, then trim to the number of artists
            linecolors = _get_colors_mapped(mapping, linecolors)
            linecolors = linecolors[: len(collections)]

        assert len(collections) == len(linecolors)
        for patch, color in zip(collections, linecolors):
            if isinstance(patch, Line2D):
                result = patch.get_color()
                # Line2D may contain a string color expression; normalize it
                result = conv.to_rgba(result)
            elif isinstance(patch, (PolyCollection, LineCollection)):
                # these collections return an array of edge colors; compare
                # against the first row
                result = tuple(patch.get_edgecolor()[0])
            else:
                result = patch.get_edgecolor()

            expected = conv.to_rgba(color)
            assert result == expected

    if facecolors is not None:
        if mapping is not None:
            facecolors = _get_colors_mapped(mapping, facecolors)
            facecolors = facecolors[: len(collections)]

        assert len(collections) == len(facecolors)
        for patch, color in zip(collections, facecolors):
            if isinstance(patch, Collection):
                # returned as list of np.array
                result = patch.get_facecolor()[0]
            else:
                result = patch.get_facecolor()

            # np.array comparison with a tuple would broadcast; compare tuples
            if isinstance(result, np.ndarray):
                result = tuple(result)

            expected = conv.to_rgba(color)
            assert result == expected
+
+
+def _check_text_labels(texts, expected):
+ """
+ Check each text has expected labels
+
+ Parameters
+ ----------
+ texts : matplotlib Text object, or its list-like
+ target text, or its list
+ expected : str or list-like which has the same length as texts
+ expected text label, or its list
+ """
+ if not is_list_like(texts):
+ assert texts.get_text() == expected
+ else:
+ labels = [t.get_text() for t in texts]
+ assert len(labels) == len(expected)
+ for label, e in zip(labels, expected):
+ assert label == e
+
+
def _check_ticks_props(axes, xlabelsize=None, xrot=None, ylabelsize=None, yrot=None):
    """
    Check each axes has expected tick properties

    Parameters
    ----------
    axes : matplotlib Axes object, or its list-like
    xlabelsize : number
        expected xticks font size
    xrot : number
        expected xticks rotation
    ylabelsize : number
        expected yticks font size
    yrot : number
        expected yticks rotation
    """
    from matplotlib.ticker import NullFormatter

    axes = _flatten_visible(axes)
    for ax in axes:
        # only inspect an axis if some expectation was given for it
        if xlabelsize is not None or xrot is not None:
            if isinstance(ax.xaxis.get_minor_formatter(), NullFormatter):
                # If minor ticks has NullFormatter, rot / fontsize are not
                # retained
                labels = ax.get_xticklabels()
            else:
                # minor tick labels carry the same properties, so check both
                labels = ax.get_xticklabels() + ax.get_xticklabels(minor=True)

            for label in labels:
                if xlabelsize is not None:
                    tm.assert_almost_equal(label.get_fontsize(), xlabelsize)
                if xrot is not None:
                    tm.assert_almost_equal(label.get_rotation(), xrot)

        # mirror of the x-axis logic above for the y axis
        if ylabelsize is not None or yrot is not None:
            if isinstance(ax.yaxis.get_minor_formatter(), NullFormatter):
                labels = ax.get_yticklabels()
            else:
                labels = ax.get_yticklabels() + ax.get_yticklabels(minor=True)

            for label in labels:
                if ylabelsize is not None:
                    tm.assert_almost_equal(label.get_fontsize(), ylabelsize)
                if yrot is not None:
                    tm.assert_almost_equal(label.get_rotation(), yrot)
+
+
def _check_ax_scales(axes, xaxis="linear", yaxis="linear"):
    """
    Assert each visible axes uses the expected x/y axis scales.

    Parameters
    ----------
    axes : matplotlib Axes object, or its list-like
    xaxis : {'linear', 'log'}
        Expected xaxis scale.
    yaxis : {'linear', 'log'}
        Expected yaxis scale.
    """
    for ax in _flatten_visible(axes):
        assert ax.xaxis.get_scale() == xaxis
        assert ax.yaxis.get_scale() == yaxis
+
+
def _check_axes_shape(axes, axes_num=None, layout=None, figsize=None):
    """
    Check expected number of axes is drawn in expected layout

    Parameters
    ----------
    axes : matplotlib Axes object, or its list-like
    axes_num : number
        expected number of axes. Unnecessary axes should be set to
        invisible.
    layout : tuple
        expected layout, (expected number of rows , columns)
    figsize : tuple
        expected figsize. default is matplotlib default
    """
    from pandas.plotting._matplotlib.tools import flatten_axes

    if figsize is None:
        # matplotlib's default figure size in inches
        figsize = (6.4, 4.8)
    visible_axes = _flatten_visible(axes)

    if axes_num is not None:
        assert len(visible_axes) == axes_num
        for ax in visible_axes:
            # check something drawn on visible axes
            assert len(ax.get_children()) > 0

    if layout is not None:
        # estimate (rows, cols) from the distinct lower-left corner
        # coordinates of every axes (including invisible ones)
        x_set = set()
        y_set = set()
        for ax in flatten_axes(axes):
            # check axes coordinates to estimate layout
            points = ax.get_position().get_points()
            x_set.add(points[0][0])
            y_set.add(points[0][1])
        result = (len(y_set), len(x_set))
        assert result == layout

    # figure size is always verified, against the default when not given
    tm.assert_numpy_array_equal(
        visible_axes[0].figure.get_size_inches(),
        np.array(figsize, dtype=np.float64),
    )
+
+
def _flatten_visible(axes: Axes | Sequence[Axes]) -> Sequence[Axes]:
    """
    Flatten ``axes`` into a flat sequence, keeping only the visible ones.

    Parameters
    ----------
    axes : matplotlib Axes object, or its list-like
    """
    from pandas.plotting._matplotlib.tools import flatten_axes

    return [ax for ax in flatten_axes(axes) if ax.get_visible()]
+
+
def _check_has_errorbars(axes, xerr=0, yerr=0):
    """
    Assert each visible axes holds the expected number of errorbars.

    Parameters
    ----------
    axes : matplotlib Axes object, or its list-like
    xerr : number
        Expected count of containers with x errorbars.
    yerr : number
        Expected count of containers with y errorbars.
    """
    for ax in _flatten_visible(axes):
        # each container contributes at most one to each count
        n_xerr = sum(bool(getattr(c, "has_xerr", False)) for c in ax.containers)
        n_yerr = sum(bool(getattr(c, "has_yerr", False)) for c in ax.containers)
        assert xerr == n_xerr
        assert yerr == n_yerr
+
+
def _check_box_return_type(
    returned, return_type, expected_keys=None, check_ax_title=True
):
    """
    Check box returned type is correct

    Parameters
    ----------
    returned : object to be tested, returned from boxplot
    return_type : str
        return_type passed to boxplot
    expected_keys : list-like, optional
        group labels in subplot case. If not passed,
        the function checks assuming boxplot uses single ax
    check_ax_title : bool
        Whether to check the ax.title is the same as expected_key
        Intended to be checked by calling from ``boxplot``.
        Normal ``plot`` doesn't attach ``ax.title``, it must be disabled.
    """
    from matplotlib.axes import Axes

    # python type that boxplot returns for each return_type value
    types = {"dict": dict, "axes": Axes, "both": tuple}
    if expected_keys is None:
        # should be fixed when the returning default is changed
        if return_type is None:
            return_type = "dict"

        assert isinstance(returned, types[return_type])
        if return_type == "both":
            # "both" is a namedtuple carrying .ax and .lines
            assert isinstance(returned.ax, Axes)
            assert isinstance(returned.lines, dict)
    else:
        # grouped/subplot case: one entry per group key
        # should be fixed when the returning default is changed
        if return_type is None:
            for r in _flatten_visible(returned):
                assert isinstance(r, Axes)
            return

        assert isinstance(returned, Series)

        assert sorted(returned.keys()) == sorted(expected_keys)
        for key, value in returned.items():
            assert isinstance(value, types[return_type])
            # check returned dict has correct mapping
            if return_type == "axes":
                if check_ax_title:
                    assert value.get_title() == key
            elif return_type == "both":
                if check_ax_title:
                    assert value.ax.get_title() == key
                assert isinstance(value.ax, Axes)
                assert isinstance(value.lines, dict)
            elif return_type == "dict":
                # the medians line belongs to the subplot's axes; use it to
                # recover the axes and check its title
                line = value["medians"][0]
                axes = line.axes
                if check_ax_title:
                    assert axes.get_title() == key
            else:
                raise AssertionError
+
+
def _check_grid_settings(obj, kinds, kws=None):
    """
    Check plots default to the rcParams['axes.grid'] setting and that an
    explicit ``grid=`` keyword overrides it, GH 9792.

    Parameters
    ----------
    obj : Series or DataFrame
        Object whose ``.plot`` accessor is exercised.
    kinds : list-like of str
        Plot kinds to exercise.
    kws : dict, optional
        Extra keyword arguments forwarded to each ``obj.plot`` call.
    """
    import matplotlib as mpl

    # BUG FIX: the default used to be a mutable ``kws={}`` shared across
    # calls; use the None sentinel instead.
    if kws is None:
        kws = {}

    def is_grid_on():
        # grid counts as "on" if either axis of the current axes shows any
        # visible gridline
        xticks = mpl.pyplot.gca().xaxis.get_major_ticks()
        yticks = mpl.pyplot.gca().yaxis.get_major_ticks()
        xoff = all(not g.gridline.get_visible() for g in xticks)
        yoff = all(not g.gridline.get_visible() for g in yticks)

        return not (xoff and yoff)

    spndx = 1
    for kind in kinds:
        # rc grid off, no override -> grid stays off
        mpl.pyplot.subplot(1, 4 * len(kinds), spndx)
        spndx += 1
        mpl.rc("axes", grid=False)
        obj.plot(kind=kind, **kws)
        assert not is_grid_on()
        mpl.pyplot.clf()

        # rc grid on, explicit grid=False overrides -> off
        mpl.pyplot.subplot(1, 4 * len(kinds), spndx)
        spndx += 1
        mpl.rc("axes", grid=True)
        obj.plot(kind=kind, grid=False, **kws)
        assert not is_grid_on()
        mpl.pyplot.clf()

        # these kinds do not draw grids at all, so skip the "grid on" checks
        if kind not in ["pie", "hexbin", "scatter"]:
            # rc grid on, no override -> on
            mpl.pyplot.subplot(1, 4 * len(kinds), spndx)
            spndx += 1
            mpl.rc("axes", grid=True)
            obj.plot(kind=kind, **kws)
            assert is_grid_on()
            mpl.pyplot.clf()

            # rc grid off, explicit grid=True overrides -> on
            mpl.pyplot.subplot(1, 4 * len(kinds), spndx)
            spndx += 1
            mpl.rc("axes", grid=False)
            obj.plot(kind=kind, grid=True, **kws)
            assert is_grid_on()
            mpl.pyplot.clf()
+
+
+def _unpack_cycler(rcParams, field="color"):
+ """
+ Auxiliary function for correctly unpacking cycler after MPL >= 1.5
+ """
+ return [v[field] for v in rcParams["axes.prop_cycle"]]
+
+
def get_x_axis(ax):
    """Return the grouper of axes that share this axes' x axis."""
    shared = ax._shared_axes
    return shared["x"]
+
+
def get_y_axis(ax):
    """Return the grouper of axes that share this axes' y axis."""
    shared = ax._shared_axes
    return shared["y"]
+
+
def _check_plot_works(f, default_axes=False, **kwargs):
    """
    Create plot and ensure that plot return object is valid.

    Parameters
    ----------
    f : func
        Plotting function.
    default_axes : bool, optional
        If False (default):
            - If `ax` not in `kwargs`, then create subplot(211) and plot there
            - Create new subplot(212) and plot there as well
            - Mind special corner case for bootstrap_plot (see `_gen_two_subplots`)
        If True:
            - Simply run plotting function with kwargs provided
            - All required axes instances will be created automatically
            - It is recommended to use it when the plotting function
            creates multiple axes itself. It helps avoid warnings like
            'UserWarning: To output multiple subplots,
            the figure containing the passed axes is being cleared'
    **kwargs
        Keyword arguments passed to the plotting function.

    Returns
    -------
    Plot object returned by the last plotting.
    """
    import matplotlib.pyplot as plt

    if default_axes:
        gen_plots = _gen_default_plot
    else:
        gen_plots = _gen_two_subplots

    ret = None
    try:
        # NOTE: plt.gcf() is evaluated even when "figure" is passed (eager
        # default), which may create an implicit current figure
        fig = kwargs.get("figure", plt.gcf())
        plt.clf()

        # each generated plot must be a recognized matplotlib return object
        for ret in gen_plots(f, fig, **kwargs):
            tm.assert_is_valid_plot_return_object(ret)

    finally:
        # the figure is always closed, even if a plot call raised
        plt.close(fig)

    return ret
+
+
+def _gen_default_plot(f, fig, **kwargs):
+ """
+ Create plot in a default way.
+ """
+ yield f(**kwargs)
+
+
+def _gen_two_subplots(f, fig, **kwargs):
+ """
+ Create plot on two subplots forcefully created.
+ """
+ if "ax" not in kwargs:
+ fig.add_subplot(211)
+ yield f(**kwargs)
+
+ if f is pd.plotting.bootstrap_plot:
+ assert "ax" not in kwargs
+ else:
+ kwargs["ax"] = fig.add_subplot(212)
+ yield f(**kwargs)
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/conftest.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..d688bbd47595c2ec6451bd9ddf7c916275013384
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/conftest.py
@@ -0,0 +1,56 @@
+import gc
+
+import numpy as np
+import pytest
+
+from pandas import (
+ DataFrame,
+ to_datetime,
+)
+
+
@pytest.fixture(autouse=True)
def mpl_cleanup():
    """
    Isolate every test in this package from matplotlib global state.

    Mirrors matplotlib/testing/decorators.py#L24:
    1) Resets units registry
    2) Resets rc_context
    3) Closes all figures
    """
    # importorskip: skip tests entirely when matplotlib is not installed
    mpl = pytest.importorskip("matplotlib")
    mpl_units = pytest.importorskip("matplotlib.units")
    plt = pytest.importorskip("matplotlib.pyplot")
    orig_units_registry = mpl_units.registry.copy()
    with mpl.rc_context():
        # non-interactive backend: nothing is rendered to a display
        mpl.use("template")
        yield
    # restore the units registry captured before the test ran
    mpl_units.registry.clear()
    mpl_units.registry.update(orig_units_registry)
    plt.close("all")
    # https://matplotlib.org/stable/users/prev_whats_new/whats_new_3.6.0.html#garbage-collection-is-no-longer-run-on-figure-close # noqa: E501
    gc.collect(1)
+
+
@pytest.fixture
def hist_df():
    """Synthetic 50-row frame with categorical, numeric and datetime columns."""
    n_rows = 50
    rng = np.random.default_rng(10)
    # keep the rng call order identical so the sampled values are unchanged
    gender = rng.choice(["Male", "Female"], size=n_rows)
    classroom = rng.choice(["A", "B", "C"], size=n_rows)
    height = rng.normal(66, 4, size=n_rows)
    weight = rng.normal(161, 32, size=n_rows)
    category = rng.integers(4, size=n_rows)
    # uniform ns-since-epoch timestamps within a fixed late-1995 window
    stamps = rng.integers(
        812419200000000000,
        819331200000000000,
        size=n_rows,
        dtype=np.int64,
    )
    return DataFrame(
        {
            "gender": gender,
            "classroom": classroom,
            "height": height,
            "weight": weight,
            "category": category,
            "datetime": to_datetime(stamps),
        }
    )
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__init__.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d9fb8db299e9593a85928fa0e6f5690c0850b69c
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_color.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_color.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5cc7ef7a21b8c53fce0b15f3b6c4e8d930ec7ca2
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_color.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_groupby.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_groupby.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0f4ce09999264b1a22581c7a66981df2c5135bd3
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_groupby.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_legend.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_legend.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..85f286ed01966233c155d0fda16275f79dde43de
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_legend.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_subplots.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_subplots.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c56e7f12c60365c4c47a5eab938bb176f6a274e0
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame_subplots.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_hist_box_by.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_hist_box_by.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..48d8e9b326f3cf58135ab331f1286ae94b4b93b8
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/__pycache__/test_hist_box_by.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ca4067214bbd20e56065bf74d6851e926679b88
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame.py
@@ -0,0 +1,2599 @@
+""" Test cases for DataFrame.plot """
+from datetime import (
+ date,
+ datetime,
+)
+import gc
+import itertools
+import re
+import string
+import weakref
+
+import numpy as np
+import pytest
+
+import pandas.util._test_decorators as td
+
+from pandas.core.dtypes.api import is_list_like
+
+import pandas as pd
+from pandas import (
+ DataFrame,
+ Index,
+ MultiIndex,
+ PeriodIndex,
+ Series,
+ bdate_range,
+ date_range,
+ option_context,
+ plotting,
+)
+import pandas._testing as tm
+from pandas.tests.plotting.common import (
+ _check_ax_scales,
+ _check_axes_shape,
+ _check_box_return_type,
+ _check_colors,
+ _check_data,
+ _check_grid_settings,
+ _check_has_errorbars,
+ _check_legend_labels,
+ _check_plot_works,
+ _check_text_labels,
+ _check_ticks_props,
+ _check_visible,
+ get_y_axis,
+)
+from pandas.util.version import Version
+
+from pandas.io.formats.printing import pprint_thing
+
+mpl = pytest.importorskip("matplotlib")
+plt = pytest.importorskip("matplotlib.pyplot")
+
+
+class TestDataFramePlots:
+ @pytest.mark.slow
+ def test_plot(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=10, freq="B"),
+ )
+ _check_plot_works(df.plot, grid=False)
+
+ @pytest.mark.slow
+ def test_plot_subplots(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=10, freq="B"),
+ )
+ # _check_plot_works adds an ax so use default_axes=True to avoid warning
+ axes = _check_plot_works(df.plot, default_axes=True, subplots=True)
+ _check_axes_shape(axes, axes_num=4, layout=(4, 1))
+
+ @pytest.mark.slow
+ def test_plot_subplots_negative_layout(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=10, freq="B"),
+ )
+ axes = _check_plot_works(
+ df.plot,
+ default_axes=True,
+ subplots=True,
+ layout=(-1, 2),
+ )
+ _check_axes_shape(axes, axes_num=4, layout=(2, 2))
+
+ @pytest.mark.slow
+ def test_plot_subplots_use_index(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=10, freq="B"),
+ )
+ axes = _check_plot_works(
+ df.plot,
+ default_axes=True,
+ subplots=True,
+ use_index=False,
+ )
+ _check_ticks_props(axes, xrot=0)
+ _check_axes_shape(axes, axes_num=4, layout=(4, 1))
+
+ @pytest.mark.xfail(reason="Api changed in 3.6.0")
+ @pytest.mark.slow
+ def test_plot_invalid_arg(self):
+ df = DataFrame({"x": [1, 2], "y": [3, 4]})
+ msg = "'Line2D' object has no property 'blarg'"
+ with pytest.raises(AttributeError, match=msg):
+ df.plot.line(blarg=True)
+
+ @pytest.mark.slow
+ def test_plot_tick_props(self):
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+
+ ax = _check_plot_works(df.plot, use_index=True)
+ _check_ticks_props(ax, xrot=0)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "kwargs",
+ [
+ {"yticks": [1, 5, 10]},
+ {"xticks": [1, 5, 10]},
+ {"ylim": (-100, 100), "xlim": (-100, 100)},
+ {"default_axes": True, "subplots": True, "title": "blah"},
+ ],
+ )
+ def test_plot_other_args(self, kwargs):
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+ _check_plot_works(df.plot, **kwargs)
+
+ @pytest.mark.slow
+ def test_plot_visible_ax(self):
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+ # We have to redo it here because _check_plot_works does two plots,
+ # once without an ax kwarg and once with an ax kwarg and the new sharex
+ # behaviour does not remove the visibility of the latter axis (as ax is
+ # present). see: https://github.com/pandas-dev/pandas/issues/9737
+
+ axes = df.plot(subplots=True, title="blah")
+ _check_axes_shape(axes, axes_num=3, layout=(3, 1))
+ for ax in axes[:2]:
+ _check_visible(ax.xaxis) # xaxis must be visible for grid
+ _check_visible(ax.get_xticklabels(), visible=False)
+ _check_visible(ax.get_xticklabels(minor=True), visible=False)
+ _check_visible([ax.xaxis.get_label()], visible=False)
+ for ax in [axes[2]]:
+ _check_visible(ax.xaxis)
+ _check_visible(ax.get_xticklabels())
+ _check_visible([ax.xaxis.get_label()])
+ _check_ticks_props(ax, xrot=0)
+
+ @pytest.mark.slow
+ def test_plot_title(self):
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+ _check_plot_works(df.plot, title="blah")
+
+ @pytest.mark.slow
+ def test_plot_multiindex(self):
+ tuples = zip(string.ascii_letters[:10], range(10))
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=MultiIndex.from_tuples(tuples),
+ )
+ ax = _check_plot_works(df.plot, use_index=True)
+ _check_ticks_props(ax, xrot=0)
+
+ @pytest.mark.slow
+ def test_plot_multiindex_unicode(self):
+ # unicode
+ index = MultiIndex.from_tuples(
+ [
+ ("\u03b1", 0),
+ ("\u03b1", 1),
+ ("\u03b2", 2),
+ ("\u03b2", 3),
+ ("\u03b3", 4),
+ ("\u03b3", 5),
+ ("\u03b4", 6),
+ ("\u03b4", 7),
+ ],
+ names=["i0", "i1"],
+ )
+ columns = MultiIndex.from_tuples(
+ [("bar", "\u0394"), ("bar", "\u0395")], names=["c0", "c1"]
+ )
+ df = DataFrame(
+ np.random.default_rng(2).integers(0, 10, (8, 2)),
+ columns=columns,
+ index=index,
+ )
+ _check_plot_works(df.plot, title="\u03A3")
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("layout", [None, (-1, 1)])
+ def test_plot_single_column_bar(self, layout):
+ # GH 6951
+ # Test with single column
+ df = DataFrame({"x": np.random.default_rng(2).random(10)})
+ axes = _check_plot_works(df.plot.bar, subplots=True, layout=layout)
+ _check_axes_shape(axes, axes_num=1, layout=(1, 1))
+
+ @pytest.mark.slow
+ def test_plot_passed_ax(self):
+ # When ax is supplied and required number of axes is 1,
+ # passed ax should be used:
+ df = DataFrame({"x": np.random.default_rng(2).random(10)})
+ _, ax = mpl.pyplot.subplots()
+ axes = df.plot.bar(subplots=True, ax=ax)
+ assert len(axes) == 1
+ result = ax.axes
+ assert result is axes[0]
+
+ @pytest.mark.parametrize(
+ "cols, x, y",
+ [
+ [list("ABCDE"), "A", "B"],
+ [["A", "B"], "A", "B"],
+ [["C", "A"], "C", "A"],
+ [["A", "C"], "A", "C"],
+ [["B", "C"], "B", "C"],
+ [["A", "D"], "A", "D"],
+ [["A", "E"], "A", "E"],
+ ],
+ )
+ def test_nullable_int_plot(self, cols, x, y):
+ # GH 32073
+ dates = ["2008", "2009", None, "2011", "2012"]
+ df = DataFrame(
+ {
+ "A": [1, 2, 3, 4, 5],
+ "B": [1, 2, 3, 4, 5],
+ "C": np.array([7, 5, np.nan, 3, 2], dtype=object),
+ "D": pd.to_datetime(dates, format="%Y").view("i8"),
+ "E": pd.to_datetime(dates, format="%Y", utc=True).view("i8"),
+ }
+ )
+
+ _check_plot_works(df[cols].plot, x=x, y=y)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("plot", ["line", "bar", "hist", "pie"])
+ def test_integer_array_plot_series(self, plot):
+ # GH 25587
+ arr = pd.array([1, 2, 3, 4], dtype="UInt32")
+
+ s = Series(arr)
+ _check_plot_works(getattr(s.plot, plot))
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "plot, kwargs",
+ [
+ ["line", {}],
+ ["bar", {}],
+ ["hist", {}],
+ ["pie", {"y": "y"}],
+ ["scatter", {"x": "x", "y": "y"}],
+ ["hexbin", {"x": "x", "y": "y"}],
+ ],
+ )
+ def test_integer_array_plot_df(self, plot, kwargs):
+ # GH 25587
+ arr = pd.array([1, 2, 3, 4], dtype="UInt32")
+ df = DataFrame({"x": arr, "y": arr})
+ _check_plot_works(getattr(df.plot, plot), **kwargs)
+
+ def test_nonnumeric_exclude(self):
+ df = DataFrame({"A": ["x", "y", "z"], "B": [1, 2, 3]})
+ ax = df.plot()
+ assert len(ax.get_lines()) == 1 # B was plotted
+
+ def test_implicit_label(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 3)), columns=["a", "b", "c"]
+ )
+ ax = df.plot(x="a", y="b")
+ _check_text_labels(ax.xaxis.get_label(), "a")
+
+ def test_donot_overwrite_index_name(self):
+ # GH 8494
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((2, 2)), columns=["a", "b"]
+ )
+ df.index.name = "NAME"
+ df.plot(y="b", label="LABEL")
+ assert df.index.name == "NAME"
+
+ def test_plot_xy(self):
+ # columns.inferred_type == 'string'
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((5, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=5, freq="B"),
+ )
+ _check_data(df.plot(x=0, y=1), df.set_index("A")["B"].plot())
+ _check_data(df.plot(x=0), df.set_index("A").plot())
+ _check_data(df.plot(y=0), df.B.plot())
+ _check_data(df.plot(x="A", y="B"), df.set_index("A").B.plot())
+ _check_data(df.plot(x="A"), df.set_index("A").plot())
+ _check_data(df.plot(y="B"), df.B.plot())
+
+ def test_plot_xy_int_cols(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((5, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=5, freq="B"),
+ )
+ # columns.inferred_type == 'integer'
+ df.columns = np.arange(1, len(df.columns) + 1)
+ _check_data(df.plot(x=1, y=2), df.set_index(1)[2].plot())
+ _check_data(df.plot(x=1), df.set_index(1).plot())
+ _check_data(df.plot(y=1), df[1].plot())
+
+ def test_plot_xy_figsize_and_title(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((5, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=5, freq="B"),
+ )
+ # figsize and title
+ ax = df.plot(x=1, y=2, title="Test", figsize=(16, 8))
+ _check_text_labels(ax.title, "Test")
+ _check_axes_shape(ax, axes_num=1, layout=(1, 1), figsize=(16.0, 8.0))
+
+ # columns.inferred_type == 'mixed'
+ # TODO add MultiIndex test
+
+ @pytest.mark.parametrize(
+ "input_log, expected_log", [(True, "log"), ("sym", "symlog")]
+ )
+ def test_logscales(self, input_log, expected_log):
+ df = DataFrame({"a": np.arange(100)}, index=np.arange(100))
+
+ ax = df.plot(logy=input_log)
+ _check_ax_scales(ax, yaxis=expected_log)
+ assert ax.get_yscale() == expected_log
+
+ ax = df.plot(logx=input_log)
+ _check_ax_scales(ax, xaxis=expected_log)
+ assert ax.get_xscale() == expected_log
+
+ ax = df.plot(loglog=input_log)
+ _check_ax_scales(ax, xaxis=expected_log, yaxis=expected_log)
+ assert ax.get_xscale() == expected_log
+ assert ax.get_yscale() == expected_log
+
+ @pytest.mark.parametrize("input_param", ["logx", "logy", "loglog"])
+ def test_invalid_logscale(self, input_param):
+ # GH: 24867
+ df = DataFrame({"a": np.arange(100)}, index=np.arange(100))
+
+ msg = f"keyword '{input_param}' should be bool, None, or 'sym', not 'sm'"
+ with pytest.raises(ValueError, match=msg):
+ df.plot(**{input_param: "sm"})
+
+ msg = f"PiePlot ignores the '{input_param}' keyword"
+ with tm.assert_produces_warning(UserWarning, match=msg):
+ df.plot.pie(subplots=True, **{input_param: True})
+
+ def test_xcompat(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=10, freq="B"),
+ )
+ ax = df.plot(x_compat=True)
+ lines = ax.get_lines()
+ assert not isinstance(lines[0].get_xdata(), PeriodIndex)
+ _check_ticks_props(ax, xrot=30)
+
+ def test_xcompat_plot_params(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=10, freq="B"),
+ )
+ plotting.plot_params["xaxis.compat"] = True
+ ax = df.plot()
+ lines = ax.get_lines()
+ assert not isinstance(lines[0].get_xdata(), PeriodIndex)
+ _check_ticks_props(ax, xrot=30)
+
+ def test_xcompat_plot_params_x_compat(self):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=10, freq="B"),
+ )
+ plotting.plot_params["x_compat"] = False
+
+ ax = df.plot()
+ lines = ax.get_lines()
+ assert not isinstance(lines[0].get_xdata(), PeriodIndex)
+ msg = r"PeriodDtype\[B\] is deprecated"
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ assert isinstance(PeriodIndex(lines[0].get_xdata()), PeriodIndex)
+
    def test_xcompat_plot_params_context_manager(self):
        """``plot_params.use`` enables x-compat mode for a ``with`` block only."""
        df = DataFrame(
            np.random.default_rng(2).standard_normal((10, 4)),
            columns=Index(list("ABCD"), dtype=object),
            index=date_range("2000-01-01", periods=10, freq="B"),
        )
        # useful if you're plotting a bunch together
        with plotting.plot_params.use("x_compat", True):
            ax = df.plot()
            lines = ax.get_lines()
            assert not isinstance(lines[0].get_xdata(), PeriodIndex)
            _check_ticks_props(ax, xrot=30)
+
    def test_xcompat_plot_period(self):
        """Default plotting of a business-day-indexed frame uses period x-data
        (convertible back to a PeriodIndex) with unrotated ticks."""
        df = DataFrame(
            np.random.default_rng(2).standard_normal((10, 4)),
            columns=Index(list("ABCD"), dtype=object),
            index=date_range("2000-01-01", periods=10, freq="B"),
        )
        ax = df.plot()
        lines = ax.get_lines()
        assert not isinstance(lines[0].get_xdata(), PeriodIndex)
        msg = r"PeriodDtype\[B\] is deprecated "
        with tm.assert_produces_warning(FutureWarning, match=msg):
            assert isinstance(PeriodIndex(lines[0].get_xdata()), PeriodIndex)
        _check_ticks_props(ax, xrot=0)
+
    def test_period_compat(self):
        """Smoke test: axhline on a period-converted plot must not raise."""
        # GH 9012
        # period-array conversions
        df = DataFrame(
            np.random.default_rng(2).random((21, 2)),
            index=bdate_range(datetime(2000, 1, 1), datetime(2000, 1, 31)),
            columns=["a", "b"],
        )

        df.plot()
        mpl.pyplot.axhline(y=0)
+
    @pytest.mark.parametrize("index_dtype", [np.int64, np.float64])
    def test_unsorted_index(self, index_dtype):
        """Plotting a frame with a descending index preserves the (x, y)
        pairing — the data is not silently reordered."""
        df = DataFrame(
            {"y": np.arange(100)},
            index=Index(np.arange(99, -1, -1), dtype=index_dtype),
            dtype=np.int64,
        )
        ax = df.plot()
        lines = ax.get_lines()[0]
        rs = lines.get_xydata()
        # rebuild a Series from the plotted points and compare to the source
        rs = Series(rs[:, 1], rs[:, 0], dtype=np.int64, name="y")
        tm.assert_series_equal(rs, df.y, check_index_type=False)
+
    @pytest.mark.parametrize(
        "df",
        [
            DataFrame({"y": [0.0, 1.0, 2.0, 3.0]}, index=[1.0, 0.0, 3.0, 2.0]),
            DataFrame(
                {"y": [0.0, 1.0, np.nan, 3.0, 4.0, 5.0, 6.0]},
                index=[1.0, 0.0, 3.0, 2.0, np.nan, 3.0, 2.0],
            ),
        ],
    )
    def test_unsorted_index_lims(self, df):
        """x-limits must enclose all plotted x values even for unsorted or
        NaN-containing indexes."""
        ax = df.plot()
        xmin, xmax = ax.get_xlim()
        lines = ax.get_lines()
        assert xmin <= np.nanmin(lines[0].get_data()[0])
        assert xmax >= np.nanmax(lines[0].get_data()[0])
+
    def test_unsorted_index_lims_x_y(self):
        """x-limits enclose the data when x comes from a column (x=/y= form)."""
        df = DataFrame({"y": [0.0, 1.0, 2.0, 3.0], "z": [91.0, 90.0, 93.0, 92.0]})
        ax = df.plot(x="z", y="y")
        xmin, xmax = ax.get_xlim()
        lines = ax.get_lines()
        assert xmin <= np.nanmin(lines[0].get_data()[0])
        assert xmax >= np.nanmax(lines[0].get_data()[0])
+
    def test_negative_log(self):
        """Area plots of all-negative data reject log y-scales."""
        df = -DataFrame(
            np.random.default_rng(2).random((6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["x", "y", "z", "four"],
        )
        msg = "Log-y scales are not supported in area plot"
        with pytest.raises(ValueError, match=msg):
            df.plot.area(logy=True)
        with pytest.raises(ValueError, match=msg):
            df.plot.area(loglog=True)
+
    def _compare_stacked_y_cood(self, normal_lines, stacked_lines):
        """Assert each stacked line equals the running sum of the unstacked
        lines' y-data.

        NOTE(review): "cood" looks like a typo for "coord"; renaming would
        require updating all call sites, so it is left as-is here.
        """
        base = np.zeros(len(normal_lines[0].get_data()[1]))
        for nl, sl in zip(normal_lines, stacked_lines):
            base += nl.get_data()[1]  # get y coordinates
            sy = sl.get_data()[1]
            tm.assert_numpy_array_equal(base, sy)
+
    @pytest.mark.parametrize("kind", ["line", "area"])
    @pytest.mark.parametrize("mult", [1, -1])
    def test_line_area_stacked(self, kind, mult):
        """Stacked line/area y-data is the cumulative sum of the unstacked
        plot, for all-positive and all-negative frames."""
        df = mult * DataFrame(
            np.random.default_rng(2).random((6, 4)), columns=["w", "x", "y", "z"]
        )

        ax1 = _check_plot_works(df.plot, kind=kind, stacked=False)
        ax2 = _check_plot_works(df.plot, kind=kind, stacked=True)
        self._compare_stacked_y_cood(ax1.lines, ax2.lines)
+
    @pytest.mark.parametrize("kind", ["line", "area"])
    def test_line_area_stacked_sep_df(self, kind):
        """Positive and negative columns stack independently of one another."""
        # each column has either positive or negative value
        sep_df = DataFrame(
            {
                "w": np.random.default_rng(2).random(6),
                "x": np.random.default_rng(2).random(6),
                "y": -np.random.default_rng(2).random(6),
                "z": -np.random.default_rng(2).random(6),
            }
        )
        ax1 = _check_plot_works(sep_df.plot, kind=kind, stacked=False)
        ax2 = _check_plot_works(sep_df.plot, kind=kind, stacked=True)
        # positive columns (first two) and negative columns (last two) each
        # form their own stack
        self._compare_stacked_y_cood(ax1.lines[:2], ax2.lines[:2])
        self._compare_stacked_y_cood(ax1.lines[2:], ax2.lines[2:])
+
    def test_line_area_stacked_mixed(self):
        """Stacking a column with mixed-sign values raises ValueError."""
        mixed_df = DataFrame(
            np.random.default_rng(2).standard_normal((6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["w", "x", "y", "z"],
        )
        # unstacked plotting of mixed-sign data is fine
        _check_plot_works(mixed_df.plot, stacked=False)

        msg = (
            "When stacked is True, each column must be either all positive or "
            "all negative. Column 'w' contains both positive and negative "
            "values"
        )
        with pytest.raises(ValueError, match=msg):
            mixed_df.plot(stacked=True)
+
    @pytest.mark.parametrize("kind", ["line", "area"])
    def test_line_area_stacked_positive_idx(self, kind):
        """Stacked plots with logx work when the index is strictly positive."""
        df = DataFrame(
            np.random.default_rng(2).random((6, 4)), columns=["w", "x", "y", "z"]
        )
        # Use an index with strictly positive values, preventing
        # matplotlib from warning about ignoring xlim
        df2 = df.set_index(df.index + 1)
        _check_plot_works(df2.plot, kind=kind, logx=True, stacked=True)
+
    @pytest.mark.parametrize(
        "idx", [range(4), date_range("2023-01-1", freq="D", periods=4)]
    )
    def test_line_area_nan_df(self, idx):
        """Line plots mask NaN values rather than dropping or filling them."""
        values1 = [1, 2, np.nan, 3]
        values2 = [3, np.nan, 2, 1]
        df = DataFrame({"a": values1, "b": values2}, index=idx)

        ax = _check_plot_works(df.plot)
        # y-data comes back as masked arrays with the NaN positions masked
        masked1 = ax.lines[0].get_ydata()
        masked2 = ax.lines[1].get_ydata()
        # remove nan for comparison purpose

        exp = np.array([1, 2, 3], dtype=np.float64)
        tm.assert_numpy_array_equal(np.delete(masked1.data, 2), exp)

        exp = np.array([3, 2, 1], dtype=np.float64)
        tm.assert_numpy_array_equal(np.delete(masked2.data, 1), exp)
        tm.assert_numpy_array_equal(masked1.mask, np.array([False, False, True, False]))
        tm.assert_numpy_array_equal(masked2.mask, np.array([False, True, False, False]))
+
    @pytest.mark.parametrize(
        "idx", [range(4), date_range("2023-01-1", freq="D", periods=4)]
    )
    def test_line_area_nan_df_stacked(self, idx):
        """Stacked line plots treat NaN as 0 so the stack stays monotone."""
        values1 = [1, 2, np.nan, 3]
        values2 = [3, np.nan, 2, 1]
        df = DataFrame({"a": values1, "b": values2}, index=idx)

        # NaNs are filled with 0 before stacking
        expected1 = np.array([1, 2, 0, 3], dtype=np.float64)
        expected2 = np.array([3, 0, 2, 1], dtype=np.float64)

        ax = _check_plot_works(df.plot, stacked=True)
        tm.assert_numpy_array_equal(ax.lines[0].get_ydata(), expected1)
        # second line sits on top of the first (running sum)
        tm.assert_numpy_array_equal(ax.lines[1].get_ydata(), expected1 + expected2)
+
+ @pytest.mark.parametrize(
+ "idx", [range(4), date_range("2023-01-1", freq="D", periods=4)]
+ )
+ @pytest.mark.parametrize("kwargs", [{}, {"stacked": False}])
+ def test_line_area_nan_df_stacked_area(self, idx, kwargs):
+ values1 = [1, 2, np.nan, 3]
+ values2 = [3, np.nan, 2, 1]
+ df = DataFrame({"a": values1, "b": values2}, index=idx)
+
+ expected1 = np.array([1, 2, 0, 3], dtype=np.float64)
+ expected2 = np.array([3, 0, 2, 1], dtype=np.float64)
+
+ ax = _check_plot_works(df.plot.area, **kwargs)
+ tm.assert_numpy_array_equal(ax.lines[0].get_ydata(), expected1)
+ if kwargs:
+ tm.assert_numpy_array_equal(ax.lines[1].get_ydata(), expected2)
+ else:
+ tm.assert_numpy_array_equal(ax.lines[1].get_ydata(), expected1 + expected2)
+
+ ax = _check_plot_works(df.plot.area, stacked=False)
+ tm.assert_numpy_array_equal(ax.lines[0].get_ydata(), expected1)
+ tm.assert_numpy_array_equal(ax.lines[1].get_ydata(), expected2)
+
    @pytest.mark.parametrize("kwargs", [{}, {"secondary_y": True}])
    def test_line_lim(self, kwargs):
        """x-limits enclose the plotted data, with and without secondary_y."""
        df = DataFrame(np.random.default_rng(2).random((6, 3)), columns=["x", "y", "z"])
        ax = df.plot(**kwargs)
        xmin, xmax = ax.get_xlim()
        lines = ax.get_lines()
        assert xmin <= lines[0].get_data()[0][0]
        assert xmax >= lines[0].get_data()[0][-1]
+
    def test_line_lim_subplots(self):
        """secondary_y + subplots: every axes is a left axes and its x-limits
        enclose the data."""
        df = DataFrame(np.random.default_rng(2).random((6, 3)), columns=["x", "y", "z"])
        axes = df.plot(secondary_y=True, subplots=True)
        _check_axes_shape(axes, axes_num=3, layout=(3, 1))
        for ax in axes:
            # secondary axes carry a reference back to the primary ("left_ax")
            assert hasattr(ax, "left_ax")
            assert not hasattr(ax, "right_ax")
            xmin, xmax = ax.get_xlim()
            lines = ax.get_lines()
            assert xmin <= lines[0].get_data()[0][0]
            assert xmax >= lines[0].get_data()[0][-1]
+
    @pytest.mark.xfail(
        strict=False,
        reason="2020-12-01 this has been failing periodically on the "
        "ymin==0 assertion for a week or so.",
    )
    @pytest.mark.parametrize("stacked", [True, False])
    def test_area_lim(self, stacked):
        """Area plots anchor the y-axis at 0 (ymin for positive data, ymax for
        negative data) and x-limits enclose the data."""
        df = DataFrame(
            np.random.default_rng(2).random((6, 4)), columns=["x", "y", "z", "four"]
        )

        neg_df = -df

        ax = _check_plot_works(df.plot.area, stacked=stacked)
        xmin, xmax = ax.get_xlim()
        ymin, ymax = ax.get_ylim()
        lines = ax.get_lines()
        assert xmin <= lines[0].get_data()[0][0]
        assert xmax >= lines[0].get_data()[0][-1]
        assert ymin == 0

        ax = _check_plot_works(neg_df.plot.area, stacked=stacked)
        ymin, ymax = ax.get_ylim()
        assert ymax == 0
+
    def test_area_sharey_dont_overwrite(self):
        """Area plots must not break sharey-joined axes (GH 37942)."""
        # GH37942
        df = DataFrame(np.random.default_rng(2).random((4, 2)), columns=["x", "y"])
        fig, (ax1, ax2) = mpl.pyplot.subplots(1, 2, sharey=True)

        df.plot(ax=ax1, kind="area")
        df.plot(ax=ax2, kind="area")

        # the y-axes must still be joined after plotting
        assert get_y_axis(ax1).joined(ax1, ax2)
        assert get_y_axis(ax2).joined(ax1, ax2)
+
    @pytest.mark.parametrize("stacked", [True, False])
    def test_bar_linewidth(self, stacked):
        """The linewidth keyword is applied to every bar patch."""
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))

        ax = df.plot.bar(stacked=stacked, linewidth=2)
        for r in ax.patches:
            assert r.get_linewidth() == 2
+
    def test_bar_linewidth_subplots(self):
        """linewidth is applied to every bar patch in every subplot."""
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # subplots
        axes = df.plot.bar(linewidth=2, subplots=True)
        _check_axes_shape(axes, axes_num=5, layout=(5, 1))
        for ax in axes:
            for r in ax.patches:
                assert r.get_linewidth() == 2
+
    @pytest.mark.parametrize(
        "meth, dim", [("bar", "get_width"), ("barh", "get_height")]
    )
    @pytest.mark.parametrize("stacked", [True, False])
    def test_bar_barwidth(self, meth, dim, stacked):
        """The ``width`` keyword sets the full group width: grouped bars share
        it equally, stacked bars each get the full width.

        ``dim`` maps the bar direction to the matplotlib accessor (bar width
        for vertical bars, bar height for horizontal ones).
        """
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))

        width = 0.9

        ax = getattr(df.plot, meth)(stacked=stacked, width=width)
        for r in ax.patches:
            if not stacked:
                # grouped: the group width is split across the columns
                assert getattr(r, dim)() == width / len(df.columns)
            else:
                assert getattr(r, dim)() == width
+
    @pytest.mark.parametrize(
        "meth, dim", [("bar", "get_width"), ("barh", "get_height")]
    )
    def test_barh_barwidth_subplots(self, meth, dim):
        """In subplot mode each bar occupies the full ``width`` (one column
        per axes, so nothing is shared)."""
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))

        width = 0.9

        axes = getattr(df.plot, meth)(width=width, subplots=True)
        for ax in axes:
            for r in ax.patches:
                assert getattr(r, dim)() == width
+
    def test_bar_bottom_left_bottom(self):
        """``bottom`` offsets vertical bars: scalar applies to every patch,
        a sequence applies per index position (first column of a stack)."""
        df = DataFrame(np.random.default_rng(2).random((5, 5)))
        ax = df.plot.bar(stacked=False, bottom=1)
        result = [p.get_y() for p in ax.patches]
        assert result == [1] * 25

        ax = df.plot.bar(stacked=True, bottom=[-1, -2, -3, -4, -5])
        # only the first column's patches sit directly on the given bottoms
        result = [p.get_y() for p in ax.patches[:5]]
        assert result == [-1, -2, -3, -4, -5]
+
    def test_bar_bottom_left_left(self):
        """``left`` offsets horizontal bars, analogous to ``bottom`` for bar."""
        df = DataFrame(np.random.default_rng(2).random((5, 5)))
        ax = df.plot.barh(stacked=False, left=np.array([1, 1, 1, 1, 1]))
        result = [p.get_x() for p in ax.patches]
        assert result == [1] * 25

        ax = df.plot.barh(stacked=True, left=[1, 2, 3, 4, 5])
        # only the first column's patches start exactly at the given lefts
        result = [p.get_x() for p in ax.patches[:5]]
        assert result == [1, 2, 3, 4, 5]
+
    def test_bar_bottom_left_subplots(self):
        """``bottom``/``left`` offsets also apply per-axes in subplot mode."""
        df = DataFrame(np.random.default_rng(2).random((5, 5)))
        axes = df.plot.bar(subplots=True, bottom=-1)
        for ax in axes:
            result = [p.get_y() for p in ax.patches]
            assert result == [-1] * 5

        axes = df.plot.barh(subplots=True, left=np.array([1, 1, 1, 1, 1]))
        for ax in axes:
            result = [p.get_x() for p in ax.patches]
            assert result == [1] * 5
+
    def test_bar_nan(self):
        """NaN values render as zero-height bars."""
        df = DataFrame({"A": [10, np.nan, 20], "B": [5, 10, 20], "C": [1, 2, 3]})
        ax = df.plot.bar()
        expected = [10, 0, 20, 5, 10, 20, 1, 2, 3]
        result = [p.get_height() for p in ax.patches]
        assert result == expected
+
    def test_bar_nan_stacked(self):
        """Stacked bars treat NaN as 0 for both heights and stack offsets."""
        df = DataFrame({"A": [10, np.nan, 20], "B": [5, 10, 20], "C": [1, 2, 3]})
        ax = df.plot.bar(stacked=True)
        expected = [10, 0, 20, 5, 10, 20, 1, 2, 3]
        result = [p.get_height() for p in ax.patches]
        assert result == expected

        # each patch's base is the cumulative height of the columns below it
        result = [p.get_y() for p in ax.patches]
        expected = [0.0, 0.0, 0.0, 10.0, 0.0, 20.0, 15.0, 10.0, 40.0]
        assert result == expected
+
+ @pytest.mark.parametrize("idx", [Index, pd.CategoricalIndex])
+ def test_bar_categorical(self, idx):
+ # GH 13019
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((6, 5)),
+ index=idx(list("ABCDEF")),
+ columns=idx(list("abcde")),
+ )
+
+ ax = df.plot.bar()
+ ticks = ax.xaxis.get_ticklocs()
+ tm.assert_numpy_array_equal(ticks, np.array([0, 1, 2, 3, 4, 5]))
+ assert ax.get_xlim() == (-0.5, 5.5)
+ # check left-edge of bars
+ assert ax.patches[0].get_x() == -0.25
+ assert ax.patches[-1].get_x() == 5.15
+
+ ax = df.plot.bar(stacked=True)
+ tm.assert_numpy_array_equal(ticks, np.array([0, 1, 2, 3, 4, 5]))
+ assert ax.get_xlim() == (-0.5, 5.5)
+ assert ax.patches[0].get_x() == -0.25
+ assert ax.patches[-1].get_x() == 4.75
+
    @pytest.mark.parametrize("x, y", [("x", "y"), (1, 2)])
    def test_plot_scatter(self, x, y):
        """Scatter accepts both column labels and positional column indices."""
        df = DataFrame(
            np.random.default_rng(2).standard_normal((6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["x", "y", "z", "four"],
        )

        _check_plot_works(df.plot.scatter, x=x, y=y)
+
    def test_plot_scatter_error(self):
        """Scatter requires both x and y; omitting either raises TypeError."""
        df = DataFrame(
            np.random.default_rng(2).standard_normal((6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["x", "y", "z", "four"],
        )
        msg = re.escape("scatter() missing 1 required positional argument: 'y'")
        with pytest.raises(TypeError, match=msg):
            df.plot.scatter(x="x")
        msg = re.escape("scatter() missing 1 required positional argument: 'x'")
        with pytest.raises(TypeError, match=msg):
            df.plot.scatter(y="y")
+
    def test_plot_scatter_shape(self):
        """Scatter with subplots=True yields a single-axes layout (GH 6951)."""
        df = DataFrame(
            np.random.default_rng(2).standard_normal((6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["x", "y", "z", "four"],
        )
        # GH 6951
        axes = df.plot(x="x", y="y", kind="scatter", subplots=True)
        _check_axes_shape(axes, axes_num=1, layout=(1, 1))
+
    def test_raise_error_on_datetime_time_data(self):
        """datetime.time values are rejected by matplotlib scatter (GH 8113)."""
        # GH 8113, datetime.time type is not supported by matplotlib in scatter
        df = DataFrame(np.random.default_rng(2).standard_normal(10), columns=["a"])
        df["dtime"] = date_range(start="2014-01-01", freq="h", periods=10).time
        msg = "must be a string or a (real )?number, not 'datetime.time'"

        with pytest.raises(TypeError, match=msg):
            df.plot(kind="scatter", x="dtime", y="a")
+
    @pytest.mark.parametrize("x, y", [("dates", "vals"), (0, 1)])
    def test_scatterplot_datetime_data(self, x, y):
        """Scatter works with datetime64 x-data (GH 30391)."""
        # GH 30391
        dates = date_range(start=date(2019, 1, 1), periods=12, freq="W")
        vals = np.random.default_rng(2).normal(0, 1, len(dates))
        df = DataFrame({"dates": dates, "vals": vals})

        _check_plot_works(df.plot.scatter, x=x, y=y)
+
    @pytest.mark.parametrize(
        "infer_string", [False, pytest.param(True, marks=td.skip_if_no("pyarrow"))]
    )
    @pytest.mark.parametrize("x, y", [("a", "b"), (0, 1)])
    @pytest.mark.parametrize("b_col", [[2, 3, 4], ["a", "b", "c"]])
    def test_scatterplot_object_data(self, b_col, x, y, infer_string):
        """Scatter works with object/string columns, with and without the
        pyarrow-backed string inference option (GH 18755)."""
        # GH 18755
        with option_context("future.infer_string", infer_string):
            df = DataFrame({"a": ["A", "B", "C"], "b": b_col})

            _check_plot_works(df.plot.scatter, x=x, y=y)
+
    @pytest.mark.parametrize("ordered", [True, False])
    @pytest.mark.parametrize(
        "categories",
        (["setosa", "versicolor", "virginica"], ["versicolor", "virginica", "setosa"]),
    )
    def test_scatterplot_color_by_categorical(self, ordered, categories):
        """Coloring by a Categorical column builds a discrete colorbar whose
        tick labels follow the declared category order."""
        df = DataFrame(
            [[5.1, 3.5], [4.9, 3.0], [7.0, 3.2], [6.4, 3.2], [5.9, 3.0]],
            columns=["length", "width"],
        )
        df["species"] = pd.Categorical(
            ["setosa", "setosa", "virginica", "virginica", "versicolor"],
            ordered=ordered,
            categories=categories,
        )
        ax = df.plot.scatter(x=0, y=1, c="species")
        (colorbar_collection,) = ax.collections
        colorbar = colorbar_collection.colorbar

        # one tick centered in each of the 3 category bands
        expected_ticks = np.array([0.5, 1.5, 2.5])
        result_ticks = colorbar.get_ticks()
        tm.assert_numpy_array_equal(result_ticks, expected_ticks)

        expected_boundaries = np.array([0.0, 1.0, 2.0, 3.0])
        result_boundaries = colorbar._boundaries
        tm.assert_numpy_array_equal(result_boundaries, expected_boundaries)

        # tick labels follow the declared category order, not data order
        expected_yticklabels = categories
        result_yticklabels = [i.get_text() for i in colorbar.ax.get_ymajorticklabels()]
        assert all(i == j for i, j in zip(result_yticklabels, expected_yticklabels))
+
    @pytest.mark.parametrize("x, y", [("x", "y"), ("y", "x"), ("y", "y")])
    def test_plot_scatter_with_categorical_data(self, x, y):
        """Categorical columns are plottable on either scatter axis."""
        # after fixing GH 18755, should be able to plot categorical data
        df = DataFrame({"x": [1, 2, 3, 4], "y": pd.Categorical(["a", "b", "a", "c"])})

        _check_plot_works(df.plot.scatter, x=x, y=y)
+
    @pytest.mark.parametrize("x, y, c", [("x", "y", "z"), (0, 1, 2)])
    def test_plot_scatter_with_c(self, x, y, c):
        """``c=<column>`` colors by that column: default Greys colormap and a
        colorbar labeled with the column name."""
        df = DataFrame(
            np.random.default_rng(2).integers(low=0, high=100, size=(6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["x", "y", "z", "four"],
        )

        ax = df.plot.scatter(x=x, y=y, c=c)
        # default to Greys
        assert ax.collections[0].cmap.name == "Greys"

        assert ax.collections[0].colorbar.ax.get_ylabel() == "z"
+
    def test_plot_scatter_with_c_props(self):
        """colormap= overrides the default cmap; colorbar=False suppresses the
        colorbar; a plain color string still works."""
        df = DataFrame(
            np.random.default_rng(2).integers(low=0, high=100, size=(6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["x", "y", "z", "four"],
        )
        cm = "cubehelix"
        ax = df.plot.scatter(x="x", y="y", c="z", colormap=cm)
        assert ax.collections[0].cmap.name == cm

        # verify turning off colorbar works
        ax = df.plot.scatter(x="x", y="y", c="z", colorbar=False)
        assert ax.collections[0].colorbar is None

        # verify that we can still plot a solid color
        ax = df.plot.scatter(x=0, y=1, c="red")
        assert ax.collections[0].colorbar is None
        _check_colors(ax.collections, facecolors=["r"])
+
    def test_plot_scatter_with_c_array(self):
        """An RGBA array passed as ``c`` flows through to matplotlib unchanged
        (GH 8852); a float array takes the colormap path."""
        # Ensure that we can pass an np.array straight through to matplotlib,
        # this functionality was accidentally removed previously.
        # See https://github.com/pandas-dev/pandas/issues/8852 for bug report
        #
        # Exercise colormap path and non-colormap path as they are independent
        #
        df = DataFrame({"A": [1, 2], "B": [3, 4]})
        red_rgba = [1.0, 0.0, 0.0, 1.0]
        green_rgba = [0.0, 1.0, 0.0, 1.0]
        rgba_array = np.array([red_rgba, green_rgba])
        ax = df.plot.scatter(x="A", y="B", c=rgba_array)
        # expect the face colors of the points in the non-colormap path to be
        # identical to the values we supplied, normally we'd be on shaky ground
        # comparing floats for equality but here we expect them to be
        # identical.
        tm.assert_numpy_array_equal(ax.collections[0].get_facecolor(), rgba_array)
        # we don't test the colors of the faces in this next plot because they
        # are dependent on the spring colormap, which may change its colors
        # later.
        float_array = np.array([0.0, 1.0])
        df.plot.scatter(x="A", y="B", c=float_array, cmap="spring")
+
    def test_plot_scatter_with_s(self):
        """``s=<column>`` sizes the markers by that column (GH 32904)."""
        # this refers to GH 32904
        df = DataFrame(
            np.random.default_rng(2).random((10, 3)) * 100, columns=["a", "b", "c"]
        )

        ax = df.plot.scatter(x="a", y="b", s="c")
        tm.assert_numpy_array_equal(df["c"].values, right=ax.collections[0].get_sizes())
+
    def test_plot_scatter_with_norm(self):
        """An explicit ``norm`` is passed through to the collection (GH 45809)."""
        # added while fixing GH 45809
        df = DataFrame(
            np.random.default_rng(2).random((10, 3)) * 100, columns=["a", "b", "c"]
        )
        norm = mpl.colors.LogNorm()
        ax = df.plot.scatter(x="a", y="b", c="c", norm=norm)
        assert ax.collections[0].norm is norm
+
    def test_plot_scatter_without_norm(self):
        """Without an explicit norm, colors are normalized over the color
        column's min/max range (GH 45809)."""
        # added while fixing GH 45809
        df = DataFrame(
            np.random.default_rng(2).random((10, 3)) * 100, columns=["a", "b", "c"]
        )
        ax = df.plot.scatter(x="a", y="b", c="c")
        plot_norm = ax.collections[0].norm
        color_min_max = (df.c.min(), df.c.max())
        default_norm = mpl.colors.Normalize(*color_min_max)
        # the implicit norm must map every value like the default Normalize
        for value in df.c:
            assert plot_norm(value) == default_norm(value)
+
    @pytest.mark.slow
    @pytest.mark.parametrize(
        "kwargs",
        [
            {},
            {"legend": False},
            {"default_axes": True, "subplots": True},
            {"stacked": True},
        ],
    )
    def test_plot_bar(self, kwargs):
        """Smoke test: bar plots work across common keyword combinations."""
        df = DataFrame(
            np.random.default_rng(2).standard_normal((6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["one", "two", "three", "four"],
        )

        _check_plot_works(df.plot.bar, **kwargs)
+
    @pytest.mark.slow
    def test_plot_bar_int_col(self):
        """Smoke test: bar plots work with integer column labels."""
        df = DataFrame(
            np.random.default_rng(2).standard_normal((10, 15)),
            index=list(string.ascii_letters[:10]),
            columns=range(15),
        )
        _check_plot_works(df.plot.bar)
+
    @pytest.mark.slow
    def test_plot_bar_ticks(self):
        """Bar plots rotate x-tick labels 90° by default; rot/fontsize apply."""
        df = DataFrame({"a": [0, 1], "b": [1, 0]})
        ax = _check_plot_works(df.plot.bar)
        _check_ticks_props(ax, xrot=90)

        ax = df.plot.bar(rot=35, fontsize=10)
        _check_ticks_props(ax, xrot=35, xlabelsize=10, ylabelsize=10)
+
    @pytest.mark.slow
    def test_plot_barh_ticks(self):
        """barh leaves y-tick labels unrotated by default; rot/fontsize apply."""
        df = DataFrame({"a": [0, 1], "b": [1, 0]})
        ax = _check_plot_works(df.plot.barh)
        _check_ticks_props(ax, yrot=0)

        ax = df.plot.barh(rot=55, fontsize=11)
        _check_ticks_props(ax, yrot=55, ylabelsize=11, xlabelsize=11)
+
    def test_boxplot(self, hist_df):
        """Box plots label one tick per numeric column and draw 7 line
        artists (box, whiskers, caps, median, fliers) per column."""
        df = hist_df
        numeric_cols = df._get_numeric_data().columns
        labels = [pprint_thing(c) for c in numeric_cols]

        ax = _check_plot_works(df.plot.box)
        _check_text_labels(ax.get_xticklabels(), labels)
        tm.assert_numpy_array_equal(
            ax.xaxis.get_ticklocs(), np.arange(1, len(numeric_cols) + 1)
        )
        # 7 matplotlib line artists per boxed column
        assert len(ax.lines) == 7 * len(numeric_cols)
+
    def test_boxplot_series(self, hist_df):
        """Series.plot.box honors rot and works as a bare smoke test."""
        df = hist_df
        series = df["height"]
        axes = series.plot.box(rot=40)
        _check_ticks_props(axes, xrot=40, yrot=0)

        _check_plot_works(series.plot.box)
+
    def test_boxplot_series_positions(self, hist_df):
        """The ``positions`` keyword places box ticks at the given x-locations."""
        df = hist_df
        positions = np.array([1, 6, 7])
        ax = df.plot.box(positions=positions)
        numeric_cols = df._get_numeric_data().columns
        labels = [pprint_thing(c) for c in numeric_cols]
        _check_text_labels(ax.get_xticklabels(), labels)
        tm.assert_numpy_array_equal(ax.xaxis.get_ticklocs(), positions)
        assert len(ax.lines) == 7 * len(numeric_cols)
+
    def test_boxplot_vertical(self, hist_df):
        """Horizontal box plots (vert=False) put labels on the y-axis and
        apply rot to the y-tick labels."""
        df = hist_df
        numeric_cols = df._get_numeric_data().columns
        labels = [pprint_thing(c) for c in numeric_cols]

        # if horizontal, yticklabels are rotated
        ax = df.plot.box(rot=50, fontsize=8, vert=False)
        _check_ticks_props(ax, xrot=0, yrot=50, ylabelsize=8)
        _check_text_labels(ax.get_yticklabels(), labels)
        assert len(ax.lines) == 7 * len(numeric_cols)
+
    @pytest.mark.filterwarnings("ignore:Attempt:UserWarning")
    def test_boxplot_vertical_subplots(self, hist_df):
        """Horizontal box subplots: one axes per column, log x-scale applied,
        each axes labeled with its own column."""
        df = hist_df
        numeric_cols = df._get_numeric_data().columns
        labels = [pprint_thing(c) for c in numeric_cols]
        axes = _check_plot_works(
            df.plot.box,
            default_axes=True,
            subplots=True,
            vert=False,
            logx=True,
        )
        _check_axes_shape(axes, axes_num=3, layout=(1, 3))
        _check_ax_scales(axes, xaxis="log")
        for ax, label in zip(axes, labels):
            _check_text_labels(ax.get_yticklabels(), [label])
            # 7 line artists for the single box in each subplot
            assert len(ax.lines) == 7
+
    def test_boxplot_vertical_positions(self, hist_df):
        """``positions`` places horizontal boxes at the given y-locations."""
        df = hist_df
        numeric_cols = df._get_numeric_data().columns
        labels = [pprint_thing(c) for c in numeric_cols]
        positions = np.array([3, 2, 8])
        ax = df.plot.box(positions=positions, vert=False)
        _check_text_labels(ax.get_yticklabels(), labels)
        tm.assert_numpy_array_equal(ax.yaxis.get_ticklocs(), positions)
        assert len(ax.lines) == 7 * len(numeric_cols)
+
    def test_boxplot_return_type_invalid(self):
        """An unknown ``return_type`` raises ValueError."""
        df = DataFrame(
            np.random.default_rng(2).standard_normal((6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["one", "two", "three", "four"],
        )
        msg = "return_type must be {None, 'axes', 'dict', 'both'}"
        with pytest.raises(ValueError, match=msg):
            df.plot.box(return_type="not_a_type")
+
    @pytest.mark.parametrize("return_type", ["dict", "axes", "both"])
    def test_boxplot_return_type_invalid_type(self, return_type):
        """Each valid ``return_type`` produces the matching result shape."""
        df = DataFrame(
            np.random.default_rng(2).standard_normal((6, 4)),
            index=list(string.ascii_letters[:6]),
            columns=["one", "two", "three", "four"],
        )
        result = df.plot.box(return_type=return_type)
        _check_box_return_type(result, return_type)
+
    def test_kde_df(self):
        """KDE plots legend every column and leave x-ticks unrotated."""
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((100, 4)))
        ax = _check_plot_works(df.plot, kind="kde")
        expected = [pprint_thing(c) for c in df.columns]
        _check_legend_labels(ax, labels=expected)
        _check_ticks_props(ax, xrot=0)
+
    def test_kde_df_rot(self):
        """KDE plots honor rot and fontsize tick keywords."""
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((10, 4)))
        ax = df.plot(kind="kde", rot=20, fontsize=5)
        _check_ticks_props(ax, xrot=20, xlabelsize=5, ylabelsize=5)
+
    def test_kde_df_subplots(self):
        """KDE with subplots=True yields one axes per column."""
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((10, 4)))
        axes = _check_plot_works(
            df.plot,
            default_axes=True,
            kind="kde",
            subplots=True,
        )
        _check_axes_shape(axes, axes_num=4, layout=(4, 1))
+
    def test_kde_df_logy(self):
        """logy=True puts every KDE subplot on a log y-scale."""
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((10, 4)))
        axes = df.plot(kind="kde", logy=True, subplots=True)
        _check_ax_scales(axes, yaxis="log")
+
    def test_kde_missing_vals(self):
        """KDE plots tolerate NaN values in the data."""
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).uniform(size=(100, 4)))
        df.loc[0, 0] = np.nan
        _check_plot_works(df.plot, kind="kde")
+
    def test_hist_df(self):
        """Histograms legend every column; subplot mode honors logy."""
        df = DataFrame(np.random.default_rng(2).standard_normal((100, 4)))

        ax = _check_plot_works(df.plot.hist)
        expected = [pprint_thing(c) for c in df.columns]
        _check_legend_labels(ax, labels=expected)

        axes = _check_plot_works(
            df.plot.hist,
            default_axes=True,
            subplots=True,
            logy=True,
        )
        _check_axes_shape(axes, axes_num=4, layout=(4, 1))
        _check_ax_scales(axes, yaxis="log")
+
    def test_hist_df_series(self):
        """Series histograms honor the rot keyword."""
        series = Series(np.random.default_rng(2).random(10))
        axes = series.plot.hist(rot=40)
        _check_ticks_props(axes, xrot=40, yrot=0)
+
    def test_hist_df_series_cumulative_density(self):
        """A cumulative density histogram ends at exactly 1.0."""
        from matplotlib.patches import Rectangle

        series = Series(np.random.default_rng(2).random(10))
        ax = series.plot.hist(cumulative=True, bins=4, density=True)
        # the height of the last bin must be 1.0 (cumulative density)
        rects = [x for x in ax.get_children() if isinstance(x, Rectangle)]
        tm.assert_almost_equal(rects[-1].get_height(), 1.0)
+
    def test_hist_df_series_cumulative(self):
        """A cumulative count histogram's last bin holds every observation."""
        from matplotlib.patches import Rectangle

        series = Series(np.random.default_rng(2).random(10))
        ax = series.plot.hist(cumulative=True, bins=4)
        rects = [x for x in ax.get_children() if isinstance(x, Rectangle)]

        # rects[-1] is an axes background patch, so the last bin is rects[-2];
        # its cumulative count equals the full sample size (10)
        tm.assert_almost_equal(rects[-2].get_height(), 10.0)
+
    def test_hist_df_orientation(self):
        """Horizontal histograms rotate the y-tick labels via rot."""
        df = DataFrame(np.random.default_rng(2).standard_normal((10, 4)))
        # if horizontal, yticklabels are rotated
        axes = df.plot.hist(rot=50, fontsize=8, orientation="horizontal")
        _check_ticks_props(axes, xrot=0, yrot=50, ylabelsize=8)
+
    @pytest.mark.parametrize(
        "weights", [0.1 * np.ones(shape=(100,)), 0.1 * np.ones(shape=(100, 2))]
    )
    def test_hist_weights(self, weights):
        """Uniform weights of 0.1 scale every histogram bar by 0.1 (GH 33173);
        both 1-D (shared) and 2-D (per-column) weight shapes are accepted."""
        # GH 33173

        df = DataFrame(
            dict(zip(["A", "B"], np.random.default_rng(2).standard_normal((2, 100))))
        )

        ax1 = _check_plot_works(df.plot, kind="hist", weights=weights)
        ax2 = _check_plot_works(df.plot, kind="hist")

        patch_height_with_weights = [patch.get_height() for patch in ax1.patches]

        # original heights with no weights, and we manually multiply with example
        # weights, so after multiplication, they should be almost same
        expected_patch_height = [0.1 * patch.get_height() for patch in ax2.patches]

        tm.assert_almost_equal(patch_height_with_weights, expected_patch_height)
+
    def _check_box_coord(
        self,
        patches,
        expected_y=None,
        expected_h=None,
        expected_x=None,
        expected_w=None,
    ):
        """Assert selected geometry of histogram bar patches.

        Each ``expected_*`` argument is optional; only the supplied ones are
        checked (y/height for vertical bars, x/width for horizontal bars).
        """
        result_y = np.array([p.get_y() for p in patches])
        result_height = np.array([p.get_height() for p in patches])
        result_x = np.array([p.get_x() for p in patches])
        result_width = np.array([p.get_width() for p in patches])
        # dtype is depending on above values, no need to check

        if expected_y is not None:
            tm.assert_numpy_array_equal(result_y, expected_y, check_dtype=False)
        if expected_h is not None:
            tm.assert_numpy_array_equal(result_height, expected_h, check_dtype=False)
        if expected_x is not None:
            tm.assert_numpy_array_equal(result_x, expected_x, check_dtype=False)
        if expected_w is not None:
            tm.assert_numpy_array_equal(result_width, expected_w, check_dtype=False)
+
    @pytest.mark.parametrize(
        "data",
        [
            {
                "A": np.repeat(np.array([1, 2, 3, 4, 5]), np.array([10, 9, 8, 7, 6])),
                "B": np.repeat(np.array([1, 2, 3, 4, 5]), np.array([8, 8, 8, 8, 8])),
                "C": np.repeat(np.array([1, 2, 3, 4, 5]), np.array([6, 7, 8, 9, 10])),
            },
            {
                "A": np.repeat(
                    np.array([np.nan, 1, 2, 3, 4, 5]), np.array([3, 10, 9, 8, 7, 6])
                ),
                "B": np.repeat(
                    np.array([1, np.nan, 2, 3, 4, 5]), np.array([8, 3, 8, 8, 8, 8])
                ),
                "C": np.repeat(
                    np.array([1, 2, 3, np.nan, 4, 5]), np.array([6, 7, 8, 3, 9, 10])
                ),
            },
        ],
    )
    def test_hist_df_coord(self, data):
        """Exhaustive geometry check of DataFrame histograms: bar bases and
        extents for plain, stacked, subplot, and horizontal variants, with
        NaNs (second parametrization) ignored by the binning."""
        df = DataFrame(data)

        # plain histogram: every column's bars start at 0
        ax = df.plot.hist(bins=5)
        self._check_box_coord(
            ax.patches[:5],
            expected_y=np.array([0, 0, 0, 0, 0]),
            expected_h=np.array([10, 9, 8, 7, 6]),
        )
        self._check_box_coord(
            ax.patches[5:10],
            expected_y=np.array([0, 0, 0, 0, 0]),
            expected_h=np.array([8, 8, 8, 8, 8]),
        )
        self._check_box_coord(
            ax.patches[10:],
            expected_y=np.array([0, 0, 0, 0, 0]),
            expected_h=np.array([6, 7, 8, 9, 10]),
        )

        # stacked: each column's bars sit on the cumulative heights below
        ax = df.plot.hist(bins=5, stacked=True)
        self._check_box_coord(
            ax.patches[:5],
            expected_y=np.array([0, 0, 0, 0, 0]),
            expected_h=np.array([10, 9, 8, 7, 6]),
        )
        self._check_box_coord(
            ax.patches[5:10],
            expected_y=np.array([10, 9, 8, 7, 6]),
            expected_h=np.array([8, 8, 8, 8, 8]),
        )
        self._check_box_coord(
            ax.patches[10:],
            expected_y=np.array([18, 17, 16, 15, 14]),
            expected_h=np.array([6, 7, 8, 9, 10]),
        )

        # stacked + subplots: stacking resets per axes, so bases are 0 again
        axes = df.plot.hist(bins=5, stacked=True, subplots=True)
        self._check_box_coord(
            axes[0].patches,
            expected_y=np.array([0, 0, 0, 0, 0]),
            expected_h=np.array([10, 9, 8, 7, 6]),
        )
        self._check_box_coord(
            axes[1].patches,
            expected_y=np.array([0, 0, 0, 0, 0]),
            expected_h=np.array([8, 8, 8, 8, 8]),
        )
        self._check_box_coord(
            axes[2].patches,
            expected_y=np.array([0, 0, 0, 0, 0]),
            expected_h=np.array([6, 7, 8, 9, 10]),
        )

        # horizontal
        ax = df.plot.hist(bins=5, orientation="horizontal")
        self._check_box_coord(
            ax.patches[:5],
            expected_x=np.array([0, 0, 0, 0, 0]),
            expected_w=np.array([10, 9, 8, 7, 6]),
        )
        self._check_box_coord(
            ax.patches[5:10],
            expected_x=np.array([0, 0, 0, 0, 0]),
            expected_w=np.array([8, 8, 8, 8, 8]),
        )
        self._check_box_coord(
            ax.patches[10:],
            expected_x=np.array([0, 0, 0, 0, 0]),
            expected_w=np.array([6, 7, 8, 9, 10]),
        )

        # horizontal + stacked: bases shift along x instead of y
        ax = df.plot.hist(bins=5, stacked=True, orientation="horizontal")
        self._check_box_coord(
            ax.patches[:5],
            expected_x=np.array([0, 0, 0, 0, 0]),
            expected_w=np.array([10, 9, 8, 7, 6]),
        )
        self._check_box_coord(
            ax.patches[5:10],
            expected_x=np.array([10, 9, 8, 7, 6]),
            expected_w=np.array([8, 8, 8, 8, 8]),
        )
        self._check_box_coord(
            ax.patches[10:],
            expected_x=np.array([18, 17, 16, 15, 14]),
            expected_w=np.array([6, 7, 8, 9, 10]),
        )

        # horizontal + stacked + subplots
        axes = df.plot.hist(
            bins=5, stacked=True, subplots=True, orientation="horizontal"
        )
        self._check_box_coord(
            axes[0].patches,
            expected_x=np.array([0, 0, 0, 0, 0]),
            expected_w=np.array([10, 9, 8, 7, 6]),
        )
        self._check_box_coord(
            axes[1].patches,
            expected_x=np.array([0, 0, 0, 0, 0]),
            expected_w=np.array([8, 8, 8, 8, 8]),
        )
        self._check_box_coord(
            axes[2].patches,
            expected_x=np.array([0, 0, 0, 0, 0]),
            expected_w=np.array([6, 7, 8, 9, 10]),
        )
+
+ def test_plot_int_columns(self):
+ df = DataFrame(np.random.default_rng(2).standard_normal((100, 4))).cumsum()
+ _check_plot_works(df.plot, legend=True)
+
+ @pytest.mark.parametrize(
+ "markers",
+ [
+ {0: "^", 1: "+", 2: "o"},
+ {0: "^", 1: "+"},
+ ["^", "+", "o"],
+ ["^", "+"],
+ ],
+ )
+ def test_style_by_column(self, markers):
+ import matplotlib.pyplot as plt
+
+ fig = plt.gcf()
+ fig.clf()
+ fig.add_subplot(111)
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 3)))
+ ax = df.plot(style=markers)
+ for idx, line in enumerate(ax.get_lines()[: len(markers)]):
+ assert line.get_marker() == markers[idx]
+
+ def test_line_label_none(self):
+ s = Series([1, 2])
+ ax = s.plot()
+ assert ax.get_legend() is None
+
+ ax = s.plot(legend=True)
+ assert ax.get_legend().get_texts()[0].get_text() == ""
+
+ @pytest.mark.parametrize(
+ "props, expected",
+ [
+ ("boxprops", "boxes"),
+ ("whiskerprops", "whiskers"),
+ ("capprops", "caps"),
+ ("medianprops", "medians"),
+ ],
+ )
+ def test_specified_props_kwd_plot_box(self, props, expected):
+ # GH 30346
+ df = DataFrame({k: np.random.default_rng(2).random(100) for k in "ABC"})
+ kwd = {props: {"color": "C1"}}
+ result = df.plot.box(return_type="dict", **kwd)
+
+ assert result[expected][0].get_color() == "C1"
+
+ def test_unordered_ts(self):
+ # GH#2609, GH#55906
+ index = [date(2012, 10, 1), date(2012, 9, 1), date(2012, 8, 1)]
+ values = [3.0, 2.0, 1.0]
+ df = DataFrame(
+ np.array(values),
+ index=index,
+ columns=["test"],
+ )
+ ax = df.plot()
+ xticks = ax.lines[0].get_xdata()
+ tm.assert_numpy_array_equal(xticks, np.array(index, dtype=object))
+ ydata = ax.lines[0].get_ydata()
+ tm.assert_numpy_array_equal(ydata, np.array(values))
+
+ # even though we don't sort the data before passing it to matplotlib,
+ # the ticks are sorted
+ xticks = ax.xaxis.get_ticklabels()
+ xlocs = [x.get_position()[0] for x in xticks]
+ assert Index(xlocs).is_monotonic_increasing
+ xlabels = [x.get_text() for x in xticks]
+ assert pd.to_datetime(xlabels, format="%Y-%m-%d").is_monotonic_increasing
+
+ @pytest.mark.parametrize("kind", plotting.PlotAccessor._common_kinds)
+ def test_kind_both_ways(self, kind):
+ pytest.importorskip("scipy")
+ df = DataFrame({"x": [1, 2, 3]})
+ df.plot(kind=kind)
+ getattr(df.plot, kind)()
+
+ @pytest.mark.parametrize("kind", ["scatter", "hexbin"])
+ def test_kind_both_ways_x_y(self, kind):
+ pytest.importorskip("scipy")
+ df = DataFrame({"x": [1, 2, 3]})
+ df.plot("x", "x", kind=kind)
+ getattr(df.plot, kind)("x", "x")
+
+ @pytest.mark.parametrize("kind", plotting.PlotAccessor._common_kinds)
+ def test_all_invalid_plot_data(self, kind):
+ df = DataFrame(list("abcd"))
+ msg = "no numeric data to plot"
+ with pytest.raises(TypeError, match=msg):
+ df.plot(kind=kind)
+
+ @pytest.mark.parametrize(
+ "kind", list(plotting.PlotAccessor._common_kinds) + ["area"]
+ )
+ def test_partially_invalid_plot_data_numeric(self, kind):
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 2)),
+ dtype=object,
+ )
+ df[np.random.default_rng(2).random(df.shape[0]) > 0.5] = "a"
+ msg = "no numeric data to plot"
+ with pytest.raises(TypeError, match=msg):
+ df.plot(kind=kind)
+
+ def test_invalid_kind(self):
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)))
+ msg = "invalid_plot_kind is not a valid plot kind"
+ with pytest.raises(ValueError, match=msg):
+ df.plot(kind="invalid_plot_kind")
+
+ @pytest.mark.parametrize(
+ "x,y,lbl",
+ [
+ (["B", "C"], "A", "a"),
+ (["A"], ["B", "C"], ["b", "c"]),
+ ],
+ )
+ def test_invalid_xy_args(self, x, y, lbl):
+ # GH 18671, 19699 allows y to be list-like but not x
+ df = DataFrame({"A": [1, 2], "B": [3, 4], "C": [5, 6]})
+ with pytest.raises(ValueError, match="x must be a label or position"):
+ df.plot(x=x, y=y, label=lbl)
+
+ def test_bad_label(self):
+ df = DataFrame({"A": [1, 2], "B": [3, 4], "C": [5, 6]})
+ msg = "label should be list-like and same length as y"
+ with pytest.raises(ValueError, match=msg):
+ df.plot(x="A", y=["B", "C"], label="bad_label")
+
+ @pytest.mark.parametrize("x,y", [("A", "B"), (["A"], "B")])
+ def test_invalid_xy_args_dup_cols(self, x, y):
+ # GH 18671, 19699 allows y to be list-like but not x
+ df = DataFrame([[1, 3, 5], [2, 4, 6]], columns=list("AAB"))
+ with pytest.raises(ValueError, match="x must be a label or position"):
+ df.plot(x=x, y=y)
+
+ @pytest.mark.parametrize(
+ "x,y,lbl,colors",
+ [
+ ("A", ["B"], ["b"], ["red"]),
+ ("A", ["B", "C"], ["b", "c"], ["red", "blue"]),
+ (0, [1, 2], ["bokeh", "cython"], ["green", "yellow"]),
+ ],
+ )
+ def test_y_listlike(self, x, y, lbl, colors):
+ # GH 19699: tests list-like y and verifies lbls & colors
+ df = DataFrame({"A": [1, 2], "B": [3, 4], "C": [5, 6]})
+ _check_plot_works(df.plot, x="A", y=y, label=lbl)
+
+ ax = df.plot(x=x, y=y, label=lbl, color=colors)
+ assert len(ax.lines) == len(y)
+ _check_colors(ax.get_lines(), linecolors=colors)
+
+ @pytest.mark.parametrize("x,y,colnames", [(0, 1, ["A", "B"]), (1, 0, [0, 1])])
+ def test_xy_args_integer(self, x, y, colnames):
+ # GH 20056: tests integer args for xy and checks col names
+ df = DataFrame({"A": [1, 2], "B": [3, 4]})
+ df.columns = colnames
+ _check_plot_works(df.plot, x=x, y=y)
+
+ def test_hexbin_basic(self):
+ df = DataFrame(
+ {
+ "A": np.random.default_rng(2).uniform(size=20),
+ "B": np.random.default_rng(2).uniform(size=20),
+ "C": np.arange(20) + np.random.default_rng(2).uniform(size=20),
+ }
+ )
+
+ ax = df.plot.hexbin(x="A", y="B", gridsize=10)
+ # TODO: need better way to test. This just does existence.
+ assert len(ax.collections) == 1
+
+ def test_hexbin_basic_subplots(self):
+ df = DataFrame(
+ {
+ "A": np.random.default_rng(2).uniform(size=20),
+ "B": np.random.default_rng(2).uniform(size=20),
+ "C": np.arange(20) + np.random.default_rng(2).uniform(size=20),
+ }
+ )
+ # GH 6951
+ axes = df.plot.hexbin(x="A", y="B", subplots=True)
+ # hexbin should have 2 axes in the figure, 1 for plotting and another
+ # is colorbar
+ assert len(axes[0].figure.axes) == 2
+ # return value is single axes
+ _check_axes_shape(axes, axes_num=1, layout=(1, 1))
+
+ @pytest.mark.parametrize("reduce_C", [None, np.std])
+ def test_hexbin_with_c(self, reduce_C):
+ df = DataFrame(
+ {
+ "A": np.random.default_rng(2).uniform(size=20),
+ "B": np.random.default_rng(2).uniform(size=20),
+ "C": np.arange(20) + np.random.default_rng(2).uniform(size=20),
+ }
+ )
+
+ ax = df.plot.hexbin(x="A", y="B", C="C", reduce_C_function=reduce_C)
+ assert len(ax.collections) == 1
+
+ @pytest.mark.parametrize(
+ "kwargs, expected",
+ [
+ ({}, "BuGn"), # default cmap
+ ({"colormap": "cubehelix"}, "cubehelix"),
+ ({"cmap": "YlGn"}, "YlGn"),
+ ],
+ )
+ def test_hexbin_cmap(self, kwargs, expected):
+ df = DataFrame(
+ {
+ "A": np.random.default_rng(2).uniform(size=20),
+ "B": np.random.default_rng(2).uniform(size=20),
+ "C": np.arange(20) + np.random.default_rng(2).uniform(size=20),
+ }
+ )
+ ax = df.plot.hexbin(x="A", y="B", **kwargs)
+ assert ax.collections[0].cmap.name == expected
+
+ def test_pie_df_err(self):
+ df = DataFrame(
+ np.random.default_rng(2).random((5, 3)),
+ columns=["X", "Y", "Z"],
+ index=["a", "b", "c", "d", "e"],
+ )
+ msg = "pie requires either y column or 'subplots=True'"
+ with pytest.raises(ValueError, match=msg):
+ df.plot.pie()
+
+ @pytest.mark.parametrize("y", ["Y", 2])
+ def test_pie_df(self, y):
+ df = DataFrame(
+ np.random.default_rng(2).random((5, 3)),
+ columns=["X", "Y", "Z"],
+ index=["a", "b", "c", "d", "e"],
+ )
+ ax = _check_plot_works(df.plot.pie, y=y)
+ _check_text_labels(ax.texts, df.index)
+
+ def test_pie_df_subplots(self):
+ df = DataFrame(
+ np.random.default_rng(2).random((5, 3)),
+ columns=["X", "Y", "Z"],
+ index=["a", "b", "c", "d", "e"],
+ )
+ axes = _check_plot_works(
+ df.plot.pie,
+ default_axes=True,
+ subplots=True,
+ )
+ assert len(axes) == len(df.columns)
+ for ax in axes:
+ _check_text_labels(ax.texts, df.index)
+ for ax, ylabel in zip(axes, df.columns):
+ assert ax.get_ylabel() == ylabel
+
+ def test_pie_df_labels_colors(self):
+ df = DataFrame(
+ np.random.default_rng(2).random((5, 3)),
+ columns=["X", "Y", "Z"],
+ index=["a", "b", "c", "d", "e"],
+ )
+ labels = ["A", "B", "C", "D", "E"]
+ color_args = ["r", "g", "b", "c", "m"]
+ axes = _check_plot_works(
+ df.plot.pie,
+ default_axes=True,
+ subplots=True,
+ labels=labels,
+ colors=color_args,
+ )
+ assert len(axes) == len(df.columns)
+
+ for ax in axes:
+ _check_text_labels(ax.texts, labels)
+ _check_colors(ax.patches, facecolors=color_args)
+
+ def test_pie_df_nan(self):
+ df = DataFrame(np.random.default_rng(2).random((4, 4)))
+ for i in range(4):
+ df.iloc[i, i] = np.nan
+ _, axes = mpl.pyplot.subplots(ncols=4)
+
+ # GH 37668
+ kwargs = {"normalize": True}
+
+ with tm.assert_produces_warning(None):
+ df.plot.pie(subplots=True, ax=axes, legend=True, **kwargs)
+
+ base_expected = ["0", "1", "2", "3"]
+ for i, ax in enumerate(axes):
+ expected = list(base_expected) # force copy
+ expected[i] = ""
+ result = [x.get_text() for x in ax.texts]
+ assert result == expected
+
+ # legend labels
+ # NaN's not included in legend with subplots
+ # see https://github.com/pandas-dev/pandas/issues/8390
+ result_labels = [x.get_text() for x in ax.get_legend().get_texts()]
+ expected_labels = base_expected[:i] + base_expected[i + 1 :]
+ assert result_labels == expected_labels
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "kwargs",
+ [
+ {"logy": True},
+ {"logx": True, "logy": True},
+ {"loglog": True},
+ ],
+ )
+ def test_errorbar_plot(self, kwargs):
+ d = {"x": np.arange(12), "y": np.arange(12, 0, -1)}
+ df = DataFrame(d)
+ d_err = {"x": np.ones(12) * 0.2, "y": np.ones(12) * 0.4}
+ df_err = DataFrame(d_err)
+
+ # check line plots
+ ax = _check_plot_works(df.plot, yerr=df_err, **kwargs)
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ @pytest.mark.slow
+ def test_errorbar_plot_bar(self):
+ d = {"x": np.arange(12), "y": np.arange(12, 0, -1)}
+ df = DataFrame(d)
+ d_err = {"x": np.ones(12) * 0.2, "y": np.ones(12) * 0.4}
+ df_err = DataFrame(d_err)
+ ax = _check_plot_works(
+ (df + 1).plot, yerr=df_err, xerr=df_err, kind="bar", log=True
+ )
+ _check_has_errorbars(ax, xerr=2, yerr=2)
+
+ @pytest.mark.slow
+ def test_errorbar_plot_yerr_array(self):
+ d = {"x": np.arange(12), "y": np.arange(12, 0, -1)}
+ df = DataFrame(d)
+ # yerr is raw error values
+ ax = _check_plot_works(df["y"].plot, yerr=np.ones(12) * 0.4)
+ _check_has_errorbars(ax, xerr=0, yerr=1)
+
+ ax = _check_plot_works(df.plot, yerr=np.ones((2, 12)) * 0.4)
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("yerr", ["yerr", "誤差"])
+ def test_errorbar_plot_column_name(self, yerr):
+ d = {"x": np.arange(12), "y": np.arange(12, 0, -1)}
+ df = DataFrame(d)
+ df[yerr] = np.ones(12) * 0.2
+
+ ax = _check_plot_works(df.plot, yerr=yerr)
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ ax = _check_plot_works(df.plot, y="y", x="x", yerr=yerr)
+ _check_has_errorbars(ax, xerr=0, yerr=1)
+
+ @pytest.mark.slow
+ def test_errorbar_plot_external_valueerror(self):
+ d = {"x": np.arange(12), "y": np.arange(12, 0, -1)}
+ df = DataFrame(d)
+ with tm.external_error_raised(ValueError):
+ df.plot(yerr=np.random.default_rng(2).standard_normal(11))
+
+ @pytest.mark.slow
+ def test_errorbar_plot_external_typeerror(self):
+ d = {"x": np.arange(12), "y": np.arange(12, 0, -1)}
+ df = DataFrame(d)
+ df_err = DataFrame({"x": ["zzz"] * 12, "y": ["zzz"] * 12})
+ with tm.external_error_raised(TypeError):
+ df.plot(yerr=df_err)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("kind", ["line", "bar", "barh"])
+ @pytest.mark.parametrize(
+ "y_err",
+ [
+ Series(np.ones(12) * 0.2, name="x"),
+ DataFrame({"x": np.ones(12) * 0.2, "y": np.ones(12) * 0.4}),
+ ],
+ )
+ def test_errorbar_plot_different_yerr(self, kind, y_err):
+ df = DataFrame({"x": np.arange(12), "y": np.arange(12, 0, -1)})
+
+ ax = _check_plot_works(df.plot, yerr=y_err, kind=kind)
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("kind", ["line", "bar", "barh"])
+ @pytest.mark.parametrize(
+ "y_err, x_err",
+ [
+ (
+ DataFrame({"x": np.ones(12) * 0.2, "y": np.ones(12) * 0.4}),
+ DataFrame({"x": np.ones(12) * 0.2, "y": np.ones(12) * 0.4}),
+ ),
+ (Series(np.ones(12) * 0.2, name="x"), Series(np.ones(12) * 0.2, name="x")),
+ (0.2, 0.2),
+ ],
+ )
+ def test_errorbar_plot_different_yerr_xerr(self, kind, y_err, x_err):
+ df = DataFrame({"x": np.arange(12), "y": np.arange(12, 0, -1)})
+ ax = _check_plot_works(df.plot, yerr=y_err, xerr=x_err, kind=kind)
+ _check_has_errorbars(ax, xerr=2, yerr=2)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("kind", ["line", "bar", "barh"])
+ def test_errorbar_plot_different_yerr_xerr_subplots(self, kind):
+ df = DataFrame({"x": np.arange(12), "y": np.arange(12, 0, -1)})
+ df_err = DataFrame({"x": np.ones(12) * 0.2, "y": np.ones(12) * 0.4})
+ axes = _check_plot_works(
+ df.plot,
+ default_axes=True,
+ yerr=df_err,
+ xerr=df_err,
+ subplots=True,
+ kind=kind,
+ )
+ _check_has_errorbars(axes, xerr=1, yerr=1)
+
+ @pytest.mark.xfail(reason="Iterator is consumed", raises=ValueError)
+ def test_errorbar_plot_iterator(self):
+ d = {"x": np.arange(12), "y": np.arange(12, 0, -1)}
+ df = DataFrame(d)
+
+ # yerr is iterator
+ ax = _check_plot_works(df.plot, yerr=itertools.repeat(0.1, len(df)))
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ def test_errorbar_with_integer_column_names(self):
+ # test with integer column names
+ df = DataFrame(np.abs(np.random.default_rng(2).standard_normal((10, 2))))
+ df_err = DataFrame(np.abs(np.random.default_rng(2).standard_normal((10, 2))))
+ ax = _check_plot_works(df.plot, yerr=df_err)
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+ ax = _check_plot_works(df.plot, y=0, yerr=1)
+ _check_has_errorbars(ax, xerr=0, yerr=1)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("kind", ["line", "bar"])
+ def test_errorbar_with_partial_columns_kind(self, kind):
+ df = DataFrame(np.abs(np.random.default_rng(2).standard_normal((10, 3))))
+ df_err = DataFrame(
+ np.abs(np.random.default_rng(2).standard_normal((10, 2))), columns=[0, 2]
+ )
+ ax = _check_plot_works(df.plot, yerr=df_err, kind=kind)
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ @pytest.mark.slow
+ def test_errorbar_with_partial_columns_dti(self):
+ df = DataFrame(np.abs(np.random.default_rng(2).standard_normal((10, 3))))
+ df_err = DataFrame(
+ np.abs(np.random.default_rng(2).standard_normal((10, 2))), columns=[0, 2]
+ )
+ ix = date_range("1/1/2000", periods=10, freq="ME")
+ df.set_index(ix, inplace=True)
+ df_err.set_index(ix, inplace=True)
+ ax = _check_plot_works(df.plot, yerr=df_err, kind="line")
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("err_box", [lambda x: x, DataFrame])
+ def test_errorbar_with_partial_columns_box(self, err_box):
+ d = {"x": np.arange(12), "y": np.arange(12, 0, -1)}
+ df = DataFrame(d)
+ err = err_box({"x": np.ones(12) * 0.2, "z": np.ones(12) * 0.4})
+ ax = _check_plot_works(df.plot, yerr=err)
+ _check_has_errorbars(ax, xerr=0, yerr=1)
+
+ @pytest.mark.parametrize("kind", ["line", "bar", "barh"])
+ def test_errorbar_timeseries(self, kind):
+ d = {"x": np.arange(12), "y": np.arange(12, 0, -1)}
+ d_err = {"x": np.ones(12) * 0.2, "y": np.ones(12) * 0.4}
+
+ # check time-series plots
+ ix = date_range("1/1/2000", "1/1/2001", freq="ME")
+ tdf = DataFrame(d, index=ix)
+ tdf_err = DataFrame(d_err, index=ix)
+
+ ax = _check_plot_works(tdf.plot, yerr=tdf_err, kind=kind)
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ ax = _check_plot_works(tdf.plot, yerr=d_err, kind=kind)
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ ax = _check_plot_works(tdf.plot, y="y", yerr=tdf_err["x"], kind=kind)
+ _check_has_errorbars(ax, xerr=0, yerr=1)
+
+ ax = _check_plot_works(tdf.plot, y="y", yerr="x", kind=kind)
+ _check_has_errorbars(ax, xerr=0, yerr=1)
+
+ ax = _check_plot_works(tdf.plot, yerr=tdf_err, kind=kind)
+ _check_has_errorbars(ax, xerr=0, yerr=2)
+
+ axes = _check_plot_works(
+ tdf.plot,
+ default_axes=True,
+ kind=kind,
+ yerr=tdf_err,
+ subplots=True,
+ )
+ _check_has_errorbars(axes, xerr=0, yerr=1)
+
+ def test_errorbar_asymmetrical(self):
+ err = np.random.default_rng(2).random((3, 2, 5))
+
+ # each column is [0, 1, 2, 3, 4], [3, 4, 5, 6, 7]...
+ df = DataFrame(np.arange(15).reshape(3, 5)).T
+
+ ax = df.plot(yerr=err, xerr=err / 2)
+
+ yerr_0_0 = ax.collections[1].get_paths()[0].vertices[:, 1]
+ expected_0_0 = err[0, :, 0] * np.array([-1, 1])
+ tm.assert_almost_equal(yerr_0_0, expected_0_0)
+
+ msg = re.escape(
+ "Asymmetrical error bars should be provided with the shape (3, 2, 5)"
+ )
+ with pytest.raises(ValueError, match=msg):
+ df.plot(yerr=err.T)
+
+ def test_table(self):
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+ _check_plot_works(df.plot, table=True)
+ _check_plot_works(df.plot, table=df)
+
+ # GH 35945 UserWarning
+ with tm.assert_produces_warning(None):
+ ax = df.plot()
+ assert len(ax.tables) == 0
+ plotting.table(ax, df.T)
+ assert len(ax.tables) == 1
+
+ def test_errorbar_scatter(self):
+ df = DataFrame(
+ np.abs(np.random.default_rng(2).standard_normal((5, 2))),
+ index=range(5),
+ columns=["x", "y"],
+ )
+ df_err = DataFrame(
+ np.abs(np.random.default_rng(2).standard_normal((5, 2))) / 5,
+ index=range(5),
+ columns=["x", "y"],
+ )
+
+ ax = _check_plot_works(df.plot.scatter, x="x", y="y")
+ _check_has_errorbars(ax, xerr=0, yerr=0)
+ ax = _check_plot_works(df.plot.scatter, x="x", y="y", xerr=df_err)
+ _check_has_errorbars(ax, xerr=1, yerr=0)
+
+ ax = _check_plot_works(df.plot.scatter, x="x", y="y", yerr=df_err)
+ _check_has_errorbars(ax, xerr=0, yerr=1)
+ ax = _check_plot_works(df.plot.scatter, x="x", y="y", xerr=df_err, yerr=df_err)
+ _check_has_errorbars(ax, xerr=1, yerr=1)
+
+ def test_errorbar_scatter_color(self):
+ def _check_errorbar_color(containers, expected, has_err="has_xerr"):
+ lines = []
+ errs = next(c.lines for c in ax.containers if getattr(c, has_err, False))
+ for el in errs:
+ if is_list_like(el):
+ lines.extend(el)
+ else:
+ lines.append(el)
+ err_lines = [x for x in lines if x in ax.collections]
+ _check_colors(err_lines, linecolors=np.array([expected] * len(err_lines)))
+
+ # GH 8081
+ df = DataFrame(
+ np.abs(np.random.default_rng(2).standard_normal((10, 5))),
+ columns=["a", "b", "c", "d", "e"],
+ )
+ ax = df.plot.scatter(x="a", y="b", xerr="d", yerr="e", c="red")
+ _check_has_errorbars(ax, xerr=1, yerr=1)
+ _check_errorbar_color(ax.containers, "red", has_err="has_xerr")
+ _check_errorbar_color(ax.containers, "red", has_err="has_yerr")
+
+ ax = df.plot.scatter(x="a", y="b", yerr="e", color="green")
+ _check_has_errorbars(ax, xerr=0, yerr=1)
+ _check_errorbar_color(ax.containers, "green", has_err="has_yerr")
+
+ def test_scatter_unknown_colormap(self):
+ # GH#48726
+ df = DataFrame({"a": [1, 2, 3], "b": 4})
+ with pytest.raises((ValueError, KeyError), match="'unknown' is not a"):
+ df.plot(x="a", y="b", colormap="unknown", kind="scatter")
+
+ def test_sharex_and_ax(self):
+ # https://github.com/pandas-dev/pandas/issues/9737 using gridspec,
+ # the axis in fig.get_axis() are sorted differently than pandas
+ # expected them, so make sure that only the right ones are removed
+ import matplotlib.pyplot as plt
+
+ plt.close("all")
+ gs, axes = _generate_4_axes_via_gridspec()
+
+ df = DataFrame(
+ {
+ "a": [1, 2, 3, 4, 5, 6],
+ "b": [1, 2, 3, 4, 5, 6],
+ "c": [1, 2, 3, 4, 5, 6],
+ "d": [1, 2, 3, 4, 5, 6],
+ }
+ )
+
+ def _check(axes):
+ for ax in axes:
+ assert len(ax.lines) == 1
+ _check_visible(ax.get_yticklabels(), visible=True)
+ for ax in [axes[0], axes[2]]:
+ _check_visible(ax.get_xticklabels(), visible=False)
+ _check_visible(ax.get_xticklabels(minor=True), visible=False)
+ for ax in [axes[1], axes[3]]:
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+
+ for ax in axes:
+ df.plot(x="a", y="b", title="title", ax=ax, sharex=True)
+ gs.tight_layout(plt.gcf())
+ _check(axes)
+ plt.close("all")
+
+ gs, axes = _generate_4_axes_via_gridspec()
+ with tm.assert_produces_warning(UserWarning):
+ axes = df.plot(subplots=True, ax=axes, sharex=True)
+ _check(axes)
+
+ def test_sharex_false_and_ax(self):
+ # https://github.com/pandas-dev/pandas/issues/9737 using gridspec,
+ # the axis in fig.get_axis() are sorted differently than pandas
+ # expected them, so make sure that only the right ones are removed
+ import matplotlib.pyplot as plt
+
+ df = DataFrame(
+ {
+ "a": [1, 2, 3, 4, 5, 6],
+ "b": [1, 2, 3, 4, 5, 6],
+ "c": [1, 2, 3, 4, 5, 6],
+ "d": [1, 2, 3, 4, 5, 6],
+ }
+ )
+ gs, axes = _generate_4_axes_via_gridspec()
+ # without sharex, no labels should be touched!
+ for ax in axes:
+ df.plot(x="a", y="b", title="title", ax=ax)
+
+ gs.tight_layout(plt.gcf())
+ for ax in axes:
+ assert len(ax.lines) == 1
+ _check_visible(ax.get_yticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+
+ def test_sharey_and_ax(self):
+ # https://github.com/pandas-dev/pandas/issues/9737 using gridspec,
+ # the axis in fig.get_axis() are sorted differently than pandas
+ # expected them, so make sure that only the right ones are removed
+ import matplotlib.pyplot as plt
+
+ gs, axes = _generate_4_axes_via_gridspec()
+
+ df = DataFrame(
+ {
+ "a": [1, 2, 3, 4, 5, 6],
+ "b": [1, 2, 3, 4, 5, 6],
+ "c": [1, 2, 3, 4, 5, 6],
+ "d": [1, 2, 3, 4, 5, 6],
+ }
+ )
+
+ def _check(axes):
+ for ax in axes:
+ assert len(ax.lines) == 1
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+ for ax in [axes[0], axes[1]]:
+ _check_visible(ax.get_yticklabels(), visible=True)
+ for ax in [axes[2], axes[3]]:
+ _check_visible(ax.get_yticklabels(), visible=False)
+
+ for ax in axes:
+ df.plot(x="a", y="b", title="title", ax=ax, sharey=True)
+ gs.tight_layout(plt.gcf())
+ _check(axes)
+ plt.close("all")
+
+ gs, axes = _generate_4_axes_via_gridspec()
+ with tm.assert_produces_warning(UserWarning):
+ axes = df.plot(subplots=True, ax=axes, sharey=True)
+
+ gs.tight_layout(plt.gcf())
+ _check(axes)
+
+ def test_sharey_and_ax_tight(self):
+ # https://github.com/pandas-dev/pandas/issues/9737 using gridspec,
+ import matplotlib.pyplot as plt
+
+ df = DataFrame(
+ {
+ "a": [1, 2, 3, 4, 5, 6],
+ "b": [1, 2, 3, 4, 5, 6],
+ "c": [1, 2, 3, 4, 5, 6],
+ "d": [1, 2, 3, 4, 5, 6],
+ }
+ )
+ gs, axes = _generate_4_axes_via_gridspec()
+ # without sharex, no labels should be touched!
+ for ax in axes:
+ df.plot(x="a", y="b", title="title", ax=ax)
+
+ gs.tight_layout(plt.gcf())
+ for ax in axes:
+ assert len(ax.lines) == 1
+ _check_visible(ax.get_yticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+
+ @pytest.mark.parametrize("kind", plotting.PlotAccessor._all_kinds)
+ def test_memory_leak(self, kind):
+ """Check that every plot type gets properly collected."""
+ pytest.importorskip("scipy")
+ args = {}
+ if kind in ["hexbin", "scatter", "pie"]:
+ df = DataFrame(
+ {
+ "A": np.random.default_rng(2).uniform(size=20),
+ "B": np.random.default_rng(2).uniform(size=20),
+ "C": np.arange(20) + np.random.default_rng(2).uniform(size=20),
+ }
+ )
+ args = {"x": "A", "y": "B"}
+ elif kind == "area":
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=10, freq="B"),
+ ).abs()
+ else:
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=date_range("2000-01-01", periods=10, freq="B"),
+ )
+
+ # Use a weakref so we can see if the object gets collected without
+ # also preventing it from being collected
+ ref = weakref.ref(df.plot(kind=kind, **args))
+
+ # have matplotlib delete all the figures
+ plt.close("all")
+ # force a garbage collection
+ gc.collect()
+ assert ref() is None
+
+ def test_df_gridspec_patterns_vert_horiz(self):
+ # GH 10819
+ from matplotlib import gridspec
+ import matplotlib.pyplot as plt
+
+ ts = Series(
+ np.random.default_rng(2).standard_normal(10),
+ index=date_range("1/1/2000", periods=10),
+ )
+
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 2)),
+ index=ts.index,
+ columns=list("AB"),
+ )
+
+ def _get_vertical_grid():
+ gs = gridspec.GridSpec(3, 1)
+ fig = plt.figure()
+ ax1 = fig.add_subplot(gs[:2, :])
+ ax2 = fig.add_subplot(gs[2, :])
+ return ax1, ax2
+
+ def _get_horizontal_grid():
+ gs = gridspec.GridSpec(1, 3)
+ fig = plt.figure()
+ ax1 = fig.add_subplot(gs[:, :2])
+ ax2 = fig.add_subplot(gs[:, 2])
+ return ax1, ax2
+
+ for ax1, ax2 in [_get_vertical_grid(), _get_horizontal_grid()]:
+ ax1 = ts.plot(ax=ax1)
+ assert len(ax1.lines) == 1
+ ax2 = df.plot(ax=ax2)
+ assert len(ax2.lines) == 2
+ for ax in [ax1, ax2]:
+ _check_visible(ax.get_yticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+ plt.close("all")
+
+ # subplots=True
+ for ax1, ax2 in [_get_vertical_grid(), _get_horizontal_grid()]:
+ axes = df.plot(subplots=True, ax=[ax1, ax2])
+ assert len(ax1.lines) == 1
+ assert len(ax2.lines) == 1
+ for ax in axes:
+ _check_visible(ax.get_yticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+ plt.close("all")
+
+ # vertical / subplots / sharex=True / sharey=True
+ ax1, ax2 = _get_vertical_grid()
+ with tm.assert_produces_warning(UserWarning):
+ axes = df.plot(subplots=True, ax=[ax1, ax2], sharex=True, sharey=True)
+ assert len(axes[0].lines) == 1
+ assert len(axes[1].lines) == 1
+ for ax in [ax1, ax2]:
+ # yaxis are visible because there is only one column
+ _check_visible(ax.get_yticklabels(), visible=True)
+ # xaxis of axes0 (top) are hidden
+ _check_visible(axes[0].get_xticklabels(), visible=False)
+ _check_visible(axes[0].get_xticklabels(minor=True), visible=False)
+ _check_visible(axes[1].get_xticklabels(), visible=True)
+ _check_visible(axes[1].get_xticklabels(minor=True), visible=True)
+ plt.close("all")
+
+ # horizontal / subplots / sharex=True / sharey=True
+ ax1, ax2 = _get_horizontal_grid()
+ with tm.assert_produces_warning(UserWarning):
+ axes = df.plot(subplots=True, ax=[ax1, ax2], sharex=True, sharey=True)
+ assert len(axes[0].lines) == 1
+ assert len(axes[1].lines) == 1
+ _check_visible(axes[0].get_yticklabels(), visible=True)
+ # yaxis of axes1 (right) are hidden
+ _check_visible(axes[1].get_yticklabels(), visible=False)
+ for ax in [ax1, ax2]:
+ # xaxis are visible because there is only one column
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+ plt.close("all")
+
+ def test_df_gridspec_patterns_boxed(self):
+ # GH 10819
+ from matplotlib import gridspec
+ import matplotlib.pyplot as plt
+
+ ts = Series(
+ np.random.default_rng(2).standard_normal(10),
+ index=date_range("1/1/2000", periods=10),
+ )
+
+ # boxed
+ def _get_boxed_grid():
+ gs = gridspec.GridSpec(3, 3)
+ fig = plt.figure()
+ ax1 = fig.add_subplot(gs[:2, :2])
+ ax2 = fig.add_subplot(gs[:2, 2])
+ ax3 = fig.add_subplot(gs[2, :2])
+ ax4 = fig.add_subplot(gs[2, 2])
+ return ax1, ax2, ax3, ax4
+
+ axes = _get_boxed_grid()
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ index=ts.index,
+ columns=list("ABCD"),
+ )
+ axes = df.plot(subplots=True, ax=axes)
+ for ax in axes:
+ assert len(ax.lines) == 1
+ # axis are visible because these are not shared
+ _check_visible(ax.get_yticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+ plt.close("all")
+
+ # subplots / sharex=True / sharey=True
+ axes = _get_boxed_grid()
+ with tm.assert_produces_warning(UserWarning):
+ axes = df.plot(subplots=True, ax=axes, sharex=True, sharey=True)
+ for ax in axes:
+ assert len(ax.lines) == 1
+ for ax in [axes[0], axes[2]]: # left column
+ _check_visible(ax.get_yticklabels(), visible=True)
+ for ax in [axes[1], axes[3]]: # right column
+ _check_visible(ax.get_yticklabels(), visible=False)
+ for ax in [axes[0], axes[1]]: # top row
+ _check_visible(ax.get_xticklabels(), visible=False)
+ _check_visible(ax.get_xticklabels(minor=True), visible=False)
+ for ax in [axes[2], axes[3]]: # bottom row
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+ plt.close("all")
+
+ def test_df_grid_settings(self):
+ # Make sure plot defaults to rcParams['axes.grid'] setting, GH 9792
+ _check_grid_settings(
+ DataFrame({"a": [1, 2, 3], "b": [2, 3, 4]}),
+ plotting.PlotAccessor._dataframe_kinds,
+ kws={"x": "a", "y": "b"},
+ )
+
+ def test_plain_axes(self):
+ # supplied ax itself is a SubplotAxes, but figure contains also
+ # a plain Axes object (GH11556)
+ fig, ax = mpl.pyplot.subplots()
+ fig.add_axes([0.2, 0.2, 0.2, 0.2])
+ Series(np.random.default_rng(2).random(10)).plot(ax=ax)
+
+ def test_plain_axes_df(self):
+ # supplied ax itself is a plain Axes, but because the cmap keyword
+ # a new ax is created for the colorbar -> also multiples axes (GH11520)
+ df = DataFrame(
+ {
+ "a": np.random.default_rng(2).standard_normal(8),
+ "b": np.random.default_rng(2).standard_normal(8),
+ }
+ )
+ fig = mpl.pyplot.figure()
+ ax = fig.add_axes((0, 0, 1, 1))
+ df.plot(kind="scatter", ax=ax, x="a", y="b", c="a", cmap="hsv")
+
+ def test_plain_axes_make_axes_locatable(self):
+ # other examples
+ fig, ax = mpl.pyplot.subplots()
+ from mpl_toolkits.axes_grid1 import make_axes_locatable
+
+ divider = make_axes_locatable(ax)
+ cax = divider.append_axes("right", size="5%", pad=0.05)
+ Series(np.random.default_rng(2).random(10)).plot(ax=ax)
+ Series(np.random.default_rng(2).random(10)).plot(ax=cax)
+
+ def test_plain_axes_make_inset_axes(self):
+ fig, ax = mpl.pyplot.subplots()
+ from mpl_toolkits.axes_grid1.inset_locator import inset_axes
+
+ iax = inset_axes(ax, width="30%", height=1.0, loc=3)
+ Series(np.random.default_rng(2).random(10)).plot(ax=ax)
+ Series(np.random.default_rng(2).random(10)).plot(ax=iax)
+
+ @pytest.mark.parametrize("method", ["line", "barh", "bar"])
+ def test_secondary_axis_font_size(self, method):
+ # GH: 12565
+ df = (
+ DataFrame(
+ np.random.default_rng(2).standard_normal((15, 2)), columns=list("AB")
+ )
+ .assign(C=lambda df: df.B.cumsum())
+ .assign(D=lambda df: df.C * 1.1)
+ )
+
+ fontsize = 20
+ sy = ["C", "D"]
+
+ kwargs = {"secondary_y": sy, "fontsize": fontsize, "mark_right": True}
+ ax = getattr(df.plot, method)(**kwargs)
+ _check_ticks_props(axes=ax.right_ax, ylabelsize=fontsize)
+
+ def test_x_string_values_ticks(self):
+ # Test if string plot index have a fixed xtick position
+ # GH: 7612, GH: 22334
+ df = DataFrame(
+ {
+ "sales": [3, 2, 3],
+ "visits": [20, 42, 28],
+ "day": ["Monday", "Tuesday", "Wednesday"],
+ }
+ )
+ ax = df.plot.area(x="day")
+ ax.set_xlim(-1, 3)
+ xticklabels = [t.get_text() for t in ax.get_xticklabels()]
+ labels_position = dict(zip(xticklabels, ax.get_xticks()))
+ # Testing if the label stayed at the right position
+ assert labels_position["Monday"] == 0.0
+ assert labels_position["Tuesday"] == 1.0
+ assert labels_position["Wednesday"] == 2.0
+
    def test_x_multiindex_values_ticks(self):
        """MultiIndex x labels keep fixed positions 0..n-1 after the view
        limits change (GH 15912)."""
        # Test if multiindex plot index have a fixed xtick position
        # GH: 15912
        index = MultiIndex.from_product([[2012, 2013], [1, 2]])
        df = DataFrame(
            np.random.default_rng(2).standard_normal((4, 2)),
            columns=["A", "B"],
            index=index,
        )
        ax = df.plot()
        ax.set_xlim(-1, 4)
        xticklabels = [t.get_text() for t in ax.get_xticklabels()]
        labels_position = dict(zip(xticklabels, ax.get_xticks()))
        # Testing if the label stayed at the right position
        assert labels_position["(2012, 1)"] == 0.0
        assert labels_position["(2012, 2)"] == 1.0
        assert labels_position["(2013, 1)"] == 2.0
        assert labels_position["(2013, 2)"] == 3.0
+
    @pytest.mark.parametrize("kind", ["line", "area"])
    def test_xlim_plot_line(self, kind):
        """Auto x-limits for line/area plots must extend beyond the data
        range (GH 27686)."""
        # test if xlim is set correctly in plot.line and plot.area
        # GH 27686
        df = DataFrame([2, 4], index=[1, 2])
        ax = df.plot(kind=kind)
        xlims = ax.get_xlim()
        assert xlims[0] < 1
        assert xlims[1] > 2
+
    def test_xlim_plot_line_correctly_in_mixed_plot_type(self):
        """x-limits and tick labels stay correct when bar and line plots
        (with secondary_y) share one axes (GH 27686)."""
        # test if xlim is set correctly when ax contains multiple different kinds
        # of plots, GH 27686
        fig, ax = mpl.pyplot.subplots()

        indexes = ["k1", "k2", "k3", "k4"]
        df = DataFrame(
            {
                "s1": [1000, 2000, 1500, 2000],
                "s2": [900, 1400, 2000, 3000],
                "s3": [1500, 1500, 1600, 1200],
                "secondary_y": [1, 3, 4, 3],
            },
            index=indexes,
        )
        df[["s1", "s2", "s3"]].plot.bar(ax=ax, stacked=False)
        df[["secondary_y"]].plot(ax=ax, secondary_y=True)

        xlims = ax.get_xlim()
        # bar categories sit at 0..3, so limits must extend past both ends
        assert xlims[0] < 0
        assert xlims[1] > 3

        # make sure axis labels are plotted correctly as well
        xticklabels = [t.get_text() for t in ax.get_xticklabels()]
        assert xticklabels == indexes
+
    def test_plot_no_rows(self):
        """Plotting an empty (zero-row) DataFrame produces one empty line
        instead of raising (GH 27758)."""
        # GH 27758
        df = DataFrame(columns=["foo"], dtype=int)
        assert df.empty
        ax = df.plot()
        assert len(ax.get_lines()) == 1
        line = ax.get_lines()[0]
        assert len(line.get_xdata()) == 0
        assert len(line.get_ydata()) == 0
+
    def test_plot_no_numeric_data(self):
        """A DataFrame with only non-numeric columns raises TypeError."""
        df = DataFrame(["a", "b", "c"])
        with pytest.raises(TypeError, match="no numeric data to plot"):
            df.plot()
+
    @pytest.mark.parametrize(
        "kind", ("line", "bar", "barh", "hist", "kde", "density", "area", "pie")
    )
    def test_group_subplot(self, kind):
        """``subplots`` given as an iterable of column groups produces one
        axes per group plus one per leftover column."""
        pytest.importorskip("scipy")
        d = {
            "a": np.arange(10),
            "b": np.arange(10) + 1,
            "c": np.arange(10) + 1,
            "d": np.arange(10),
            "e": np.arange(10),
        }
        df = DataFrame(d)

        axes = df.plot(subplots=[("b", "e"), ("c", "d")], kind=kind)
        assert len(axes) == 3  # 2 groups + single column a

        expected_labels = (["b", "e"], ["c", "d"], ["a"])
        for ax, labels in zip(axes, expected_labels):
            if kind != "pie":
                _check_legend_labels(ax, labels=labels)
            if kind == "line":
                assert len(ax.lines) == len(labels)
+
    def test_group_subplot_series_notimplemented(self):
        """Grouped subplots are not supported for Series."""
        ser = Series(range(1))
        msg = "An iterable subplots for a Series"
        with pytest.raises(NotImplementedError, match=msg):
            ser.plot(subplots=[("a",)])
+
    def test_group_subplot_multiindex_notimplemented(self):
        """Grouped subplots are not supported with MultiIndex columns."""
        df = DataFrame(np.eye(2), columns=MultiIndex.from_tuples([(0, 1), (1, 2)]))
        msg = "An iterable subplots for a DataFrame with a MultiIndex"
        with pytest.raises(NotImplementedError, match=msg):
            df.plot(subplots=[(0, 1)])
+
    def test_group_subplot_nonunique_cols_notimplemented(self):
        """Grouped subplots are not supported with duplicate column labels."""
        df = DataFrame(np.eye(2), columns=["a", "a"])
        msg = "An iterable subplots for a DataFrame with non-unique"
        with pytest.raises(NotImplementedError, match=msg):
            df.plot(subplots=[("a",)])
+
    @pytest.mark.parametrize(
        "subplots, expected_msg",
        [
            (123, "subplots should be a bool or an iterable"),
            ("a", "each entry should be a list/tuple"),  # iterable of non-iterable
            ((1,), "each entry should be a list/tuple"),  # iterable of non-iterable
            (("a",), "each entry should be a list/tuple"),  # iterable of strings
        ],
    )
    def test_group_subplot_bad_input(self, subplots, expected_msg):
        """Malformed ``subplots`` values raise ValueError with a specific
        message."""
        # Make sure error is raised when subplots is not a properly
        # formatted iterable. Only iterables of iterables are permitted, and
        # entries should not be strings.
        d = {"a": np.arange(10), "b": np.arange(10)}
        df = DataFrame(d)

        with pytest.raises(ValueError, match=expected_msg):
            df.plot(subplots=subplots)
+
    def test_group_subplot_invalid_column_name(self):
        """An unknown column in a subplot group raises ValueError; the
        error text differs with numpy 2.0's scalar repr change."""
        d = {"a": np.arange(10), "b": np.arange(10)}
        df = DataFrame(d)

        if Version(np.__version__) < Version("2.0.0"):
            with pytest.raises(ValueError, match=r"Column label\(s\) \['bad_name'\]"):
                df.plot(subplots=[("a", "bad_name")])
        else:
            # numpy >= 2.0 reprs str scalars as np.str_('...')
            with pytest.raises(
                ValueError, match=r"Column label\(s\) \[np\.str\_\('bad_name'\)\]"
            ):
                df.plot(subplots=[("a", "bad_name")])
+
    def test_group_subplot_duplicated_column(self):
        """A column appearing in more than one subplot group raises
        ValueError."""
        d = {"a": np.arange(10), "b": np.arange(10), "c": np.arange(10)}
        df = DataFrame(d)

        with pytest.raises(ValueError, match="should be in only one subplot"):
            df.plot(subplots=[("a", "b"), ("a", "c")])
+
    @pytest.mark.parametrize("kind", ("box", "scatter", "hexbin"))
    def test_group_subplot_invalid_kind(self, kind):
        """Plot kinds that cannot share an axes reject grouped subplots."""
        d = {"a": np.arange(10), "b": np.arange(10)}
        df = DataFrame(d)
        with pytest.raises(
            ValueError, match="When subplots is an iterable, kind must be one of"
        ):
            df.plot(subplots=[("a", "b")], kind=kind)
+
    @pytest.mark.parametrize(
        "index_name, old_label, new_label",
        [
            (None, "", "new"),
            ("old", "old", "new"),
            (None, "", ""),
            (None, "", 1),
            (None, "", [1, 2]),
        ],
    )
    @pytest.mark.parametrize("kind", ["line", "area", "bar"])
    def test_xlabel_ylabel_dataframe_single_plot(
        self, kind, index_name, old_label, new_label
    ):
        """Default labels: xlabel is the index name, ylabel empty; explicit
        xlabel/ylabel kwargs override both (GH 9093)."""
        # GH 9093
        df = DataFrame([[1, 2], [2, 5]], columns=["Type A", "Type B"])
        df.index.name = index_name

        # default is the ylabel is not shown and xlabel is index name
        ax = df.plot(kind=kind)
        assert ax.get_xlabel() == old_label
        assert ax.get_ylabel() == ""

        # old xlabel will be overridden and assigned ylabel will be used as ylabel
        ax = df.plot(kind=kind, ylabel=new_label, xlabel=new_label)
        # matplotlib stringifies non-str labels, hence str(new_label)
        assert ax.get_ylabel() == str(new_label)
        assert ax.get_xlabel() == str(new_label)
+
    @pytest.mark.parametrize(
        "xlabel, ylabel",
        [
            (None, None),
            ("X Label", None),
            (None, "Y Label"),
            ("X Label", "Y Label"),
        ],
    )
    @pytest.mark.parametrize("kind", ["scatter", "hexbin"])
    def test_xlabel_ylabel_dataframe_plane_plot(self, kind, xlabel, ylabel):
        """For scatter/hexbin, axis labels default to the x/y column names
        unless overridden (GH 37001)."""
        # GH 37001
        xcol = "Type A"
        ycol = "Type B"
        df = DataFrame([[1, 2], [2, 5]], columns=[xcol, ycol])

        # default is the labels are column names
        ax = df.plot(kind=kind, x=xcol, y=ycol, xlabel=xlabel, ylabel=ylabel)
        assert ax.get_xlabel() == (xcol if xlabel is None else xlabel)
        assert ax.get_ylabel() == (ycol if ylabel is None else ylabel)
+
    @pytest.mark.parametrize("secondary_y", (False, True))
    def test_secondary_y(self, secondary_y):
        """ylabel/ylim/yticks apply to whichever y-axis is visible,
        primary or secondary."""
        ax_df = DataFrame([0]).plot(
            secondary_y=secondary_y, ylabel="Y", ylim=(0, 100), yticks=[99]
        )
        # only check the axis that is actually shown (secondary_y hides one)
        for ax in ax_df.figure.axes:
            if ax.yaxis.get_visible():
                assert ax.get_ylabel() == "Y"
                assert ax.get_ylim() == (0, 100)
                assert ax.get_yticks()[0] == 99
+
    @pytest.mark.slow
    def test_plot_no_warning(self):
        """Plotting business-day-frequency data emits no warnings
        (GH 55138)."""
        # GH 55138
        # TODO(3.0): this can be removed once Period[B] deprecation is enforced
        df = DataFrame(
            np.random.default_rng(2).standard_normal((10, 4)),
            columns=Index(list("ABCD"), dtype=object),
            index=date_range("2000-01-01", periods=10, freq="B"),
        )
        with tm.assert_produces_warning(False):
            _ = df.plot()
            _ = df.T.plot()
+
+
def _generate_4_axes_via_gridspec():
    """Create a 2x2 GridSpec and return ``(gridspec, axes)``.

    The axes list is ordered top-left, lower-left, top-right, lower-right,
    matching the creation order the callers rely on.
    """
    import matplotlib.pyplot as plt

    gs = mpl.gridspec.GridSpec(2, 2)
    # (row, col) cells in the required order: tl, ll, tr, lr
    cells = ((0, 0), (1, 0), (0, 1), (1, 1))
    axes = [plt.subplot(gs[row, col]) for row, col in cells]

    return gs, axes
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_color.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_color.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff1edd323ef280cef5e7e79aa809906434a86407
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_color.py
@@ -0,0 +1,670 @@
+""" Test cases for DataFrame.plot """
+import re
+
+import numpy as np
+import pytest
+
+import pandas as pd
+from pandas import DataFrame
+import pandas._testing as tm
+from pandas.tests.plotting.common import (
+ _check_colors,
+ _check_plot_works,
+ _unpack_cycler,
+)
+from pandas.util.version import Version
+
+mpl = pytest.importorskip("matplotlib")
+plt = pytest.importorskip("matplotlib.pyplot")
+cm = pytest.importorskip("matplotlib.cm")
+
+
def _check_colors_box(bp, box_c, whiskers_c, medians_c, caps_c="k", fliers_c=None):
    """Assert every artist group of a boxplot dict ``bp`` has the given color.

    ``fliers_c`` defaults to black when not supplied; each expected color is
    repeated once per artist in the corresponding group.
    """
    expected = {
        "boxes": box_c,
        "whiskers": whiskers_c,
        "medians": medians_c,
        "fliers": "k" if fliers_c is None else fliers_c,
        "caps": caps_c,
    }
    for key, color in expected.items():
        artists = bp[key]
        _check_colors(artists, linecolors=[color] * len(artists))
+
+
class TestDataFrameColor:
    """Color handling for DataFrame.plot across plot kinds.

    Covers explicit colors (names, hex, RGB(A) tuples, style strings),
    colormaps (by name and by object), subplots coloring, and defaults
    drawn from the matplotlib color cycle in ``rcParams``.
    """

    @pytest.mark.parametrize(
        "color", ["C0", "C1", "C2", "C3", "C4", "C5", "C6", "C7", "C8", "C9"]
    )
    def test_mpl2_color_cycle_str(self, color):
        # GH 15516
        df = DataFrame(
            np.random.default_rng(2).standard_normal((10, 3)), columns=["a", "b", "c"]
        )
        _check_plot_works(df.plot, color=color)

    def test_color_single_series_list(self):
        # GH 3486
        df = DataFrame({"A": [1, 2, 3]})
        _check_plot_works(df.plot, color=["red"])

    @pytest.mark.parametrize("color", [(1, 0, 0), (1, 0, 0, 0.5)])
    def test_rgb_tuple_color(self, color):
        # GH 16695
        df = DataFrame({"x": [1, 2], "y": [3, 4]})
        _check_plot_works(df.plot, x="x", y="y", color=color)

    def test_color_empty_string(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)))
        with pytest.raises(ValueError, match="Invalid color argument:"):
            df.plot(color="")

    def test_color_and_style_arguments(self):
        df = DataFrame({"x": [1, 2], "y": [3, 4]})
        # passing both 'color' and 'style' arguments should be allowed
        # if there is no color symbol in the style strings:
        ax = df.plot(color=["red", "black"], style=["-", "--"])
        # check that the linestyles are correctly set:
        linestyle = [line.get_linestyle() for line in ax.lines]
        assert linestyle == ["-", "--"]
        # check that the colors are correctly set:
        color = [line.get_color() for line in ax.lines]
        assert color == ["red", "black"]
        # passing both 'color' and 'style' arguments should not be allowed
        # if there is a color symbol in the style strings:
        msg = (
            "Cannot pass 'style' string with a color symbol and 'color' keyword "
            "argument. Please use one or the other or pass 'style' without a color "
            "symbol"
        )
        with pytest.raises(ValueError, match=msg):
            df.plot(color=["red", "black"], style=["k-", "r--"])

    @pytest.mark.parametrize(
        "color, expected",
        [
            ("green", ["green"] * 4),
            (["yellow", "red", "green", "blue"], ["yellow", "red", "green", "blue"]),
        ],
    )
    def test_color_and_marker(self, color, expected):
        # GH 21003
        df = DataFrame(np.random.default_rng(2).random((7, 4)))
        ax = df.plot(color=color, style="d--")
        # check colors
        result = [i.get_color() for i in ax.lines]
        assert result == expected
        # check markers and linestyles
        assert all(i.get_linestyle() == "--" for i in ax.lines)
        assert all(i.get_marker() == "d" for i in ax.lines)

    def test_bar_colors(self):
        default_colors = _unpack_cycler(plt.rcParams)

        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.plot.bar()
        # every 5th patch is the first bar of a column
        _check_colors(ax.patches[::5], facecolors=default_colors[:5])

    def test_bar_colors_custom(self):
        custom_colors = "rgcby"
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.plot.bar(color=custom_colors)
        _check_colors(ax.patches[::5], facecolors=custom_colors)

    @pytest.mark.parametrize("colormap", ["jet", cm.jet])
    def test_bar_colors_cmap(self, colormap):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))

        ax = df.plot.bar(colormap=colormap)
        rgba_colors = [cm.jet(n) for n in np.linspace(0, 1, 5)]
        _check_colors(ax.patches[::5], facecolors=rgba_colors)

    def test_bar_colors_single_col(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.loc[:, [0]].plot.bar(color="DodgerBlue")
        _check_colors([ax.patches[0]], facecolors=["DodgerBlue"])

    def test_bar_colors_green(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.plot(kind="bar", color="green")
        _check_colors(ax.patches[::5], facecolors=["green"] * 5)

    def test_bar_user_colors(self):
        df = DataFrame(
            {"A": range(4), "B": range(1, 5), "color": ["red", "blue", "blue", "red"]}
        )
        # This should *only* work when `y` is specified, else
        # we use one color per column
        ax = df.plot.bar(y="A", color=df["color"])
        result = [p.get_facecolor() for p in ax.patches]
        expected = [
            (1.0, 0.0, 0.0, 1.0),
            (0.0, 0.0, 1.0, 1.0),
            (0.0, 0.0, 1.0, 1.0),
            (1.0, 0.0, 0.0, 1.0),
        ]
        assert result == expected

    def test_if_scatterplot_colorbar_affects_xaxis_visibility(self):
        # addressing issue #10611, to ensure colobar does not
        # interfere with x-axis label and ticklabels with
        # ipython inline backend.
        random_array = np.random.default_rng(2).random((10, 3))
        df = DataFrame(random_array, columns=["A label", "B label", "C label"])

        ax1 = df.plot.scatter(x="A label", y="B label")
        ax2 = df.plot.scatter(x="A label", y="B label", c="C label")

        vis1 = [vis.get_visible() for vis in ax1.xaxis.get_minorticklabels()]
        vis2 = [vis.get_visible() for vis in ax2.xaxis.get_minorticklabels()]
        assert vis1 == vis2

        vis1 = [vis.get_visible() for vis in ax1.xaxis.get_majorticklabels()]
        vis2 = [vis.get_visible() for vis in ax2.xaxis.get_majorticklabels()]
        assert vis1 == vis2

        assert (
            ax1.xaxis.get_label().get_visible() == ax2.xaxis.get_label().get_visible()
        )

    def test_if_hexbin_xaxis_label_is_visible(self):
        # addressing issue #10678, to ensure colobar does not
        # interfere with x-axis label and ticklabels with
        # ipython inline backend.
        random_array = np.random.default_rng(2).random((10, 3))
        df = DataFrame(random_array, columns=["A label", "B label", "C label"])

        ax = df.plot.hexbin("A label", "B label", gridsize=12)
        assert all(vis.get_visible() for vis in ax.xaxis.get_minorticklabels())
        assert all(vis.get_visible() for vis in ax.xaxis.get_majorticklabels())
        assert ax.xaxis.get_label().get_visible()

    def test_if_scatterplot_colorbars_are_next_to_parent_axes(self):
        random_array = np.random.default_rng(2).random((10, 3))
        df = DataFrame(random_array, columns=["A label", "B label", "C label"])

        fig, axes = plt.subplots(1, 2)
        df.plot.scatter("A label", "B label", c="C label", ax=axes[0])
        df.plot.scatter("A label", "B label", c="C label", ax=axes[1])
        plt.tight_layout()

        points = np.array([ax.get_position().get_points() for ax in fig.axes])
        axes_x_coords = points[:, :, 0]
        # parent axes pair and colorbar pair should be equally spaced
        parent_distance = axes_x_coords[1, :] - axes_x_coords[0, :]
        colorbar_distance = axes_x_coords[3, :] - axes_x_coords[2, :]
        assert np.isclose(parent_distance, colorbar_distance, atol=1e-7).all()

    @pytest.mark.parametrize("cmap", [None, "Greys"])
    def test_scatter_with_c_column_name_with_colors(self, cmap):
        # https://github.com/pandas-dev/pandas/issues/34316

        df = DataFrame(
            [[5.1, 3.5], [4.9, 3.0], [7.0, 3.2], [6.4, 3.2], [5.9, 3.0]],
            columns=["length", "width"],
        )
        df["species"] = ["r", "r", "g", "g", "b"]
        if cmap is not None:
            # explicit cmap with color-like c values warns that cmap is ignored
            with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
                ax = df.plot.scatter(x=0, y=1, cmap=cmap, c="species")
        else:
            ax = df.plot.scatter(x=0, y=1, c="species", cmap=cmap)
        assert ax.collections[0].colorbar is None

    def test_scatter_colors(self):
        df = DataFrame({"a": [1, 2, 3], "b": [1, 2, 3], "c": [1, 2, 3]})
        with pytest.raises(TypeError, match="Specify exactly one of `c` and `color`"):
            df.plot.scatter(x="a", y="b", c="c", color="green")

    def test_scatter_colors_not_raising_warnings(self):
        # GH-53908. Do not raise UserWarning: No data for colormapping
        # provided via 'c'. Parameters 'cmap' will be ignored
        df = DataFrame({"x": [1, 2, 3], "y": [1, 2, 3]})
        with tm.assert_produces_warning(None):
            df.plot.scatter(x="x", y="y", c="b")

    def test_scatter_colors_default(self):
        df = DataFrame({"a": [1, 2, 3], "b": [1, 2, 3], "c": [1, 2, 3]})
        default_colors = _unpack_cycler(mpl.pyplot.rcParams)

        ax = df.plot.scatter(x="a", y="b", c="c")
        tm.assert_numpy_array_equal(
            ax.collections[0].get_facecolor()[0],
            np.array(mpl.colors.ColorConverter.to_rgba(default_colors[0])),
        )

    def test_scatter_colors_white(self):
        df = DataFrame({"a": [1, 2, 3], "b": [1, 2, 3], "c": [1, 2, 3]})
        ax = df.plot.scatter(x="a", y="b", color="white")
        tm.assert_numpy_array_equal(
            ax.collections[0].get_facecolor()[0],
            np.array([1, 1, 1, 1], dtype=np.float64),
        )

    def test_scatter_colorbar_different_cmap(self):
        # GH 33389
        df = DataFrame({"x": [1, 2, 3], "y": [1, 3, 2], "c": [1, 2, 3]})
        df["x2"] = df["x"] + 1

        _, ax = plt.subplots()
        df.plot("x", "y", c="c", kind="scatter", cmap="cividis", ax=ax)
        df.plot("x2", "y", c="c", kind="scatter", cmap="magma", ax=ax)

        assert ax.collections[0].cmap.name == "cividis"
        assert ax.collections[1].cmap.name == "magma"

    def test_line_colors(self):
        custom_colors = "rgcby"
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))

        ax = df.plot(color=custom_colors)
        _check_colors(ax.get_lines(), linecolors=custom_colors)

        plt.close("all")

        # re-plotting must reproduce the same colors
        ax2 = df.plot(color=custom_colors)
        lines2 = ax2.get_lines()

        for l1, l2 in zip(ax.get_lines(), lines2):
            assert l1.get_color() == l2.get_color()

    @pytest.mark.parametrize("colormap", ["jet", cm.jet])
    def test_line_colors_cmap(self, colormap):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.plot(colormap=colormap)
        rgba_colors = [cm.jet(n) for n in np.linspace(0, 1, len(df))]
        _check_colors(ax.get_lines(), linecolors=rgba_colors)

    def test_line_colors_single_col(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # make color a list if plotting one column frame
        # handles cases like df.plot(color='DodgerBlue')
        ax = df.loc[:, [0]].plot(color="DodgerBlue")
        _check_colors(ax.lines, linecolors=["DodgerBlue"])

    def test_line_colors_single_color(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.plot(color="red")
        _check_colors(ax.get_lines(), linecolors=["red"] * 5)

    def test_line_colors_hex(self):
        # GH 10299
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        custom_colors = ["#FF0000", "#0000FF", "#FFFF00", "#000000", "#FFFFFF"]
        ax = df.plot(color=custom_colors)
        _check_colors(ax.get_lines(), linecolors=custom_colors)

    def test_dont_modify_colors(self):
        # plotting must not mutate the user-supplied color list
        colors = ["r", "g", "b"]
        DataFrame(np.random.default_rng(2).random((10, 2))).plot(color=colors)
        assert len(colors) == 3

    def test_line_colors_and_styles_subplots(self):
        # GH 9894
        default_colors = _unpack_cycler(mpl.pyplot.rcParams)

        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))

        axes = df.plot(subplots=True)
        for ax, c in zip(axes, list(default_colors)):
            _check_colors(ax.get_lines(), linecolors=[c])

    @pytest.mark.parametrize("color", ["k", "green"])
    def test_line_colors_and_styles_subplots_single_color_str(self, color):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        axes = df.plot(subplots=True, color=color)
        for ax in axes:
            _check_colors(ax.get_lines(), linecolors=[color])

    @pytest.mark.parametrize("color", ["rgcby", list("rgcby")])
    def test_line_colors_and_styles_subplots_custom_colors(self, color):
        # GH 9894
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        axes = df.plot(color=color, subplots=True)
        for ax, c in zip(axes, list(color)):
            _check_colors(ax.get_lines(), linecolors=[c])

    def test_line_colors_and_styles_subplots_colormap_hex(self):
        # GH 9894
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # GH 10299
        custom_colors = ["#FF0000", "#0000FF", "#FFFF00", "#000000", "#FFFFFF"]
        axes = df.plot(color=custom_colors, subplots=True)
        for ax, c in zip(axes, list(custom_colors)):
            _check_colors(ax.get_lines(), linecolors=[c])

    @pytest.mark.parametrize("cmap", ["jet", cm.jet])
    def test_line_colors_and_styles_subplots_colormap_subplot(self, cmap):
        # GH 9894
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        rgba_colors = [cm.jet(n) for n in np.linspace(0, 1, len(df))]
        axes = df.plot(colormap=cmap, subplots=True)
        for ax, c in zip(axes, rgba_colors):
            _check_colors(ax.get_lines(), linecolors=[c])

    def test_line_colors_and_styles_subplots_single_col(self):
        # GH 9894
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # make color a list if plotting one column frame
        # handles cases like df.plot(color='DodgerBlue')
        axes = df.loc[:, [0]].plot(color="DodgerBlue", subplots=True)
        _check_colors(axes[0].lines, linecolors=["DodgerBlue"])

    def test_line_colors_and_styles_subplots_single_char(self):
        # GH 9894
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # single character style
        axes = df.plot(style="r", subplots=True)
        for ax in axes:
            _check_colors(ax.get_lines(), linecolors=["r"])

    def test_line_colors_and_styles_subplots_list_styles(self):
        # GH 9894
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # list of styles
        styles = list("rgcby")
        axes = df.plot(style=styles, subplots=True)
        for ax, c in zip(axes, styles):
            _check_colors(ax.get_lines(), linecolors=[c])

    def test_area_colors(self):
        from matplotlib.collections import PolyCollection

        custom_colors = "rgcby"
        df = DataFrame(np.random.default_rng(2).random((5, 5)))

        ax = df.plot.area(color=custom_colors)
        _check_colors(ax.get_lines(), linecolors=custom_colors)
        poly = [o for o in ax.get_children() if isinstance(o, PolyCollection)]
        _check_colors(poly, facecolors=custom_colors)

        handles, _ = ax.get_legend_handles_labels()
        _check_colors(handles, facecolors=custom_colors)

        for h in handles:
            assert h.get_alpha() is None

    def test_area_colors_poly(self):
        from matplotlib import cm
        from matplotlib.collections import PolyCollection

        df = DataFrame(np.random.default_rng(2).random((5, 5)))
        ax = df.plot.area(colormap="jet")
        jet_colors = [cm.jet(n) for n in np.linspace(0, 1, len(df))]
        _check_colors(ax.get_lines(), linecolors=jet_colors)
        poly = [o for o in ax.get_children() if isinstance(o, PolyCollection)]
        _check_colors(poly, facecolors=jet_colors)

        handles, _ = ax.get_legend_handles_labels()
        _check_colors(handles, facecolors=jet_colors)
        for h in handles:
            assert h.get_alpha() is None

    def test_area_colors_stacked_false(self):
        from matplotlib import cm
        from matplotlib.collections import PolyCollection

        df = DataFrame(np.random.default_rng(2).random((5, 5)))
        jet_colors = [cm.jet(n) for n in np.linspace(0, 1, len(df))]
        # When stacked=False, alpha is set to 0.5
        ax = df.plot.area(colormap=cm.jet, stacked=False)
        _check_colors(ax.get_lines(), linecolors=jet_colors)
        poly = [o for o in ax.get_children() if isinstance(o, PolyCollection)]
        jet_with_alpha = [(c[0], c[1], c[2], 0.5) for c in jet_colors]
        _check_colors(poly, facecolors=jet_with_alpha)

        handles, _ = ax.get_legend_handles_labels()
        linecolors = jet_with_alpha
        _check_colors(handles[: len(jet_colors)], linecolors=linecolors)
        for h in handles:
            assert h.get_alpha() == 0.5

    def test_hist_colors(self):
        default_colors = _unpack_cycler(mpl.pyplot.rcParams)

        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.plot.hist()
        # every 10th patch is the first bin of a column's histogram
        _check_colors(ax.patches[::10], facecolors=default_colors[:5])

    def test_hist_colors_single_custom(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        custom_colors = "rgcby"
        ax = df.plot.hist(color=custom_colors)
        _check_colors(ax.patches[::10], facecolors=custom_colors)

    @pytest.mark.parametrize("colormap", ["jet", cm.jet])
    def test_hist_colors_cmap(self, colormap):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.plot.hist(colormap=colormap)
        rgba_colors = [cm.jet(n) for n in np.linspace(0, 1, 5)]
        _check_colors(ax.patches[::10], facecolors=rgba_colors)

    def test_hist_colors_single_col(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.loc[:, [0]].plot.hist(color="DodgerBlue")
        _check_colors([ax.patches[0]], facecolors=["DodgerBlue"])

    def test_hist_colors_single_color(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.plot(kind="hist", color="green")
        _check_colors(ax.patches[::10], facecolors=["green"] * 5)

    def test_kde_colors(self):
        pytest.importorskip("scipy")
        custom_colors = "rgcby"
        df = DataFrame(np.random.default_rng(2).random((5, 5)))

        ax = df.plot.kde(color=custom_colors)
        _check_colors(ax.get_lines(), linecolors=custom_colors)

    @pytest.mark.parametrize("colormap", ["jet", cm.jet])
    def test_kde_colors_cmap(self, colormap):
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        ax = df.plot.kde(colormap=colormap)
        rgba_colors = [cm.jet(n) for n in np.linspace(0, 1, len(df))]
        _check_colors(ax.get_lines(), linecolors=rgba_colors)

    def test_kde_colors_and_styles_subplots(self):
        pytest.importorskip("scipy")
        default_colors = _unpack_cycler(mpl.pyplot.rcParams)

        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))

        axes = df.plot(kind="kde", subplots=True)
        for ax, c in zip(axes, list(default_colors)):
            _check_colors(ax.get_lines(), linecolors=[c])

    @pytest.mark.parametrize("colormap", ["k", "red"])
    def test_kde_colors_and_styles_subplots_single_col_str(self, colormap):
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        axes = df.plot(kind="kde", color=colormap, subplots=True)
        for ax in axes:
            _check_colors(ax.get_lines(), linecolors=[colormap])

    def test_kde_colors_and_styles_subplots_custom_color(self):
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        custom_colors = "rgcby"
        axes = df.plot(kind="kde", color=custom_colors, subplots=True)
        for ax, c in zip(axes, list(custom_colors)):
            _check_colors(ax.get_lines(), linecolors=[c])

    @pytest.mark.parametrize("colormap", ["jet", cm.jet])
    def test_kde_colors_and_styles_subplots_cmap(self, colormap):
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        rgba_colors = [cm.jet(n) for n in np.linspace(0, 1, len(df))]
        axes = df.plot(kind="kde", colormap=colormap, subplots=True)
        for ax, c in zip(axes, rgba_colors):
            _check_colors(ax.get_lines(), linecolors=[c])

    def test_kde_colors_and_styles_subplots_single_col(self):
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # make color a list if plotting one column frame
        # handles cases like df.plot(color='DodgerBlue')
        axes = df.loc[:, [0]].plot(kind="kde", color="DodgerBlue", subplots=True)
        _check_colors(axes[0].lines, linecolors=["DodgerBlue"])

    def test_kde_colors_and_styles_subplots_single_char(self):
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # list of styles
        # single character style
        axes = df.plot(kind="kde", style="r", subplots=True)
        for ax in axes:
            _check_colors(ax.get_lines(), linecolors=["r"])

    def test_kde_colors_and_styles_subplots_list(self):
        pytest.importorskip("scipy")
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # list of styles
        styles = list("rgcby")
        axes = df.plot(kind="kde", style=styles, subplots=True)
        for ax, c in zip(axes, styles):
            _check_colors(ax.get_lines(), linecolors=[c])

    def test_boxplot_colors(self):
        default_colors = _unpack_cycler(mpl.pyplot.rcParams)

        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        bp = df.plot.box(return_type="dict")
        _check_colors_box(
            bp,
            default_colors[0],
            default_colors[0],
            default_colors[2],
            default_colors[0],
        )

    def test_boxplot_colors_dict_colors(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        dict_colors = {
            "boxes": "#572923",
            "whiskers": "#982042",
            "medians": "#804823",
            "caps": "#123456",
        }
        bp = df.plot.box(color=dict_colors, sym="r+", return_type="dict")
        _check_colors_box(
            bp,
            dict_colors["boxes"],
            dict_colors["whiskers"],
            dict_colors["medians"],
            dict_colors["caps"],
            "r",
        )

    def test_boxplot_colors_default_color(self):
        default_colors = _unpack_cycler(mpl.pyplot.rcParams)
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # partial colors
        dict_colors = {"whiskers": "c", "medians": "m"}
        bp = df.plot.box(color=dict_colors, return_type="dict")
        _check_colors_box(bp, default_colors[0], "c", "m", default_colors[0])

    @pytest.mark.parametrize("colormap", ["jet", cm.jet])
    def test_boxplot_colors_cmap(self, colormap):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        bp = df.plot.box(colormap=colormap, return_type="dict")
        jet_colors = [cm.jet(n) for n in np.linspace(0, 1, 3)]
        _check_colors_box(
            bp, jet_colors[0], jet_colors[0], jet_colors[2], jet_colors[0]
        )

    def test_boxplot_colors_single(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # string color is applied to all artists except fliers
        bp = df.plot.box(color="DodgerBlue", return_type="dict")
        _check_colors_box(bp, "DodgerBlue", "DodgerBlue", "DodgerBlue", "DodgerBlue")

    def test_boxplot_colors_tuple(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        # tuple is also applied to all artists except fliers
        bp = df.plot.box(color=(0, 1, 0), sym="#123456", return_type="dict")
        _check_colors_box(bp, (0, 1, 0), (0, 1, 0), (0, 1, 0), (0, 1, 0), "#123456")

    def test_boxplot_colors_invalid(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
        msg = re.escape(
            "color dict contains invalid key 'xxxx'. The key must be either "
            "['boxes', 'whiskers', 'medians', 'caps']"
        )
        with pytest.raises(ValueError, match=msg):
            # Color contains invalid key results in ValueError
            df.plot.box(color={"boxes": "red", "xxxx": "blue"})

    def test_default_color_cycle(self):
        import cycler

        colors = list("rgbk")
        plt.rcParams["axes.prop_cycle"] = cycler.cycler("color", colors)

        df = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))
        ax = df.plot()

        expected = _unpack_cycler(plt.rcParams)[:3]
        _check_colors(ax.get_lines(), linecolors=expected)

    def test_no_color_bar(self):
        df = DataFrame(
            {
                "A": np.random.default_rng(2).uniform(size=20),
                "B": np.random.default_rng(2).uniform(size=20),
                "C": np.arange(20) + np.random.default_rng(2).uniform(size=20),
            }
        )
        ax = df.plot.hexbin(x="A", y="B", colorbar=None)
        assert ax.collections[0].colorbar is None

    def test_mixing_cmap_and_colormap_raises(self):
        df = DataFrame(
            {
                "A": np.random.default_rng(2).uniform(size=20),
                "B": np.random.default_rng(2).uniform(size=20),
                "C": np.arange(20) + np.random.default_rng(2).uniform(size=20),
            }
        )
        msg = "Only specify one of `cmap` and `colormap`"
        with pytest.raises(TypeError, match=msg):
            df.plot.hexbin(x="A", y="B", cmap="YlGn", colormap="BuGn")

    def test_passed_bar_colors(self):
        color_tuples = [(0.9, 0, 0, 1), (0, 0.9, 0, 1), (0, 0, 0.9, 1)]
        colormap = mpl.colors.ListedColormap(color_tuples)
        barplot = DataFrame([[1, 2, 3]]).plot(kind="bar", cmap=colormap)
        assert color_tuples == [c.get_facecolor() for c in barplot.patches]

    def test_rcParams_bar_colors(self):
        color_tuples = [(0.9, 0, 0, 1), (0, 0.9, 0, 1), (0, 0, 0.9, 1)]
        with mpl.rc_context(rc={"axes.prop_cycle": mpl.cycler("color", color_tuples)}):
            barplot = DataFrame([[1, 2, 3]]).plot(kind="bar")
        assert color_tuples == [c.get_facecolor() for c in barplot.patches]

    def test_colors_of_columns_with_same_name(self):
        # ISSUE 11136 -> https://github.com/pandas-dev/pandas/issues/11136
        # Creating a DataFrame with duplicate column labels and testing colors of them.
        df = DataFrame({"b": [0, 1, 0], "a": [1, 2, 3]})
        df1 = DataFrame({"a": [2, 4, 6]})
        df_concat = pd.concat([df, df1], axis=1)
        result = df_concat.plot()
        legend = result.get_legend()
        # legendHandles was renamed to legend_handles in matplotlib 3.7
        if Version(mpl.__version__) < Version("3.7"):
            handles = legend.legendHandles
        else:
            handles = legend.legend_handles
        for legend, line in zip(handles, result.lines):
            assert legend.get_color() == line.get_color()

    def test_invalid_colormap(self):
        df = DataFrame(
            np.random.default_rng(2).standard_normal((3, 2)), columns=["A", "B"]
        )
        msg = "(is not a valid value)|(is not a known colormap)"
        with pytest.raises((ValueError, KeyError), match=msg):
            df.plot(colormap="invalid_colormap")

    def test_dataframe_none_color(self):
        # GH51953
        df = DataFrame([[1, 2, 3]])
        ax = df.plot(color=None)
        expected = _unpack_cycler(mpl.pyplot.rcParams)[:3]
        _check_colors(ax.get_lines(), linecolors=expected)
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_groupby.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_groupby.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1924185a3df1cae2f0df89ec84225cd68f8fa6d
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_groupby.py
@@ -0,0 +1,72 @@
+""" Test cases for DataFrame.plot """
+
+import pytest
+
+from pandas import DataFrame
+from pandas.tests.plotting.common import _check_visible
+
+pytest.importorskip("matplotlib")
+
+
+class TestDataFramePlotsGroupby:
+ def _assert_ytickslabels_visibility(self, axes, expected):
+ for ax, exp in zip(axes, expected):
+ _check_visible(ax.get_yticklabels(), visible=exp)
+
+ def _assert_xtickslabels_visibility(self, axes, expected):
+ for ax, exp in zip(axes, expected):
+ _check_visible(ax.get_xticklabels(), visible=exp)
+
+ @pytest.mark.parametrize(
+ "kwargs, expected",
+ [
+ # behavior without keyword
+ ({}, [True, False, True, False]),
+ # set sharey=True should be identical
+ ({"sharey": True}, [True, False, True, False]),
+ # sharey=False, all yticklabels should be visible
+ ({"sharey": False}, [True, True, True, True]),
+ ],
+ )
+ def test_groupby_boxplot_sharey(self, kwargs, expected):
+ # https://github.com/pandas-dev/pandas/issues/20968
+ # sharey can now be switched check whether the right
+ # pair of axes is turned on or off
+ df = DataFrame(
+ {
+ "a": [-1.43, -0.15, -3.70, -1.43, -0.14],
+ "b": [0.56, 0.84, 0.29, 0.56, 0.85],
+ "c": [0, 1, 2, 3, 1],
+ },
+ index=[0, 1, 2, 3, 4],
+ )
+ axes = df.groupby("c").boxplot(**kwargs)
+ self._assert_ytickslabels_visibility(axes, expected)
+
+ @pytest.mark.parametrize(
+ "kwargs, expected",
+ [
+ # behavior without keyword
+ ({}, [True, True, True, True]),
+ # set sharex=False should be identical
+ ({"sharex": False}, [True, True, True, True]),
+ # sharex=True, xticklabels should be visible
+ # only for bottom plots
+ ({"sharex": True}, [False, False, True, True]),
+ ],
+ )
+ def test_groupby_boxplot_sharex(self, kwargs, expected):
+ # https://github.com/pandas-dev/pandas/issues/20968
+ # sharex can now be switched check whether the right
+ # pair of axes is turned on or off
+
+ df = DataFrame(
+ {
+ "a": [-1.43, -0.15, -3.70, -1.43, -0.14],
+ "b": [0.56, 0.84, 0.29, 0.56, 0.85],
+ "c": [0, 1, 2, 3, 1],
+ },
+ index=[0, 1, 2, 3, 4],
+ )
+ axes = df.groupby("c").boxplot(**kwargs)
+ self._assert_xtickslabels_visibility(axes, expected)
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_legend.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_legend.py
new file mode 100644
index 0000000000000000000000000000000000000000..402a4b9531e5d4857d0d6e9d7cda2c002d0469d4
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_legend.py
@@ -0,0 +1,272 @@
+import numpy as np
+import pytest
+
+import pandas.util._test_decorators as td
+
+from pandas import (
+ DataFrame,
+ date_range,
+)
+from pandas.tests.plotting.common import (
+ _check_legend_labels,
+ _check_legend_marker,
+ _check_text_labels,
+)
+from pandas.util.version import Version
+
+mpl = pytest.importorskip("matplotlib")
+
+
+class TestFrameLegend:
+ @pytest.mark.xfail(
+ reason=(
+ "Open bug in matplotlib "
+ "https://github.com/matplotlib/matplotlib/issues/11357"
+ )
+ )
+ def test_mixed_yerr(self):
+ # https://github.com/pandas-dev/pandas/issues/39522
+ from matplotlib.collections import LineCollection
+ from matplotlib.lines import Line2D
+
+ df = DataFrame([{"x": 1, "a": 1, "b": 1}, {"x": 2, "a": 2, "b": 3}])
+
+ ax = df.plot("x", "a", c="orange", yerr=0.1, label="orange")
+ df.plot("x", "b", c="blue", yerr=None, ax=ax, label="blue")
+
+ legend = ax.get_legend()
+ if Version(mpl.__version__) < Version("3.7"):
+ result_handles = legend.legendHandles
+ else:
+ result_handles = legend.legend_handles
+
+ assert isinstance(result_handles[0], LineCollection)
+ assert isinstance(result_handles[1], Line2D)
+
+ def test_legend_false(self):
+ # https://github.com/pandas-dev/pandas/issues/40044
+ df = DataFrame({"a": [1, 1], "b": [2, 3]})
+ df2 = DataFrame({"d": [2.5, 2.5]})
+
+ ax = df.plot(legend=True, color={"a": "blue", "b": "green"}, secondary_y="b")
+ df2.plot(legend=True, color={"d": "red"}, ax=ax)
+ legend = ax.get_legend()
+ if Version(mpl.__version__) < Version("3.7"):
+ handles = legend.legendHandles
+ else:
+ handles = legend.legend_handles
+ result = [handle.get_color() for handle in handles]
+ expected = ["blue", "green", "red"]
+ assert result == expected
+
+ @pytest.mark.parametrize("kind", ["line", "bar", "barh", "kde", "area", "hist"])
+ def test_df_legend_labels(self, kind):
+ pytest.importorskip("scipy")
+ df = DataFrame(np.random.default_rng(2).random((3, 3)), columns=["a", "b", "c"])
+ df2 = DataFrame(
+ np.random.default_rng(2).random((3, 3)), columns=["d", "e", "f"]
+ )
+ df3 = DataFrame(
+ np.random.default_rng(2).random((3, 3)), columns=["g", "h", "i"]
+ )
+ df4 = DataFrame(
+ np.random.default_rng(2).random((3, 3)), columns=["j", "k", "l"]
+ )
+
+ ax = df.plot(kind=kind, legend=True)
+ _check_legend_labels(ax, labels=df.columns)
+
+ ax = df2.plot(kind=kind, legend=False, ax=ax)
+ _check_legend_labels(ax, labels=df.columns)
+
+ ax = df3.plot(kind=kind, legend=True, ax=ax)
+ _check_legend_labels(ax, labels=df.columns.union(df3.columns))
+
+ ax = df4.plot(kind=kind, legend="reverse", ax=ax)
+ expected = list(df.columns.union(df3.columns)) + list(reversed(df4.columns))
+ _check_legend_labels(ax, labels=expected)
+
+ def test_df_legend_labels_secondary_y(self):
+ pytest.importorskip("scipy")
+ df = DataFrame(np.random.default_rng(2).random((3, 3)), columns=["a", "b", "c"])
+ df2 = DataFrame(
+ np.random.default_rng(2).random((3, 3)), columns=["d", "e", "f"]
+ )
+ df3 = DataFrame(
+ np.random.default_rng(2).random((3, 3)), columns=["g", "h", "i"]
+ )
+ # Secondary Y
+ ax = df.plot(legend=True, secondary_y="b")
+ _check_legend_labels(ax, labels=["a", "b (right)", "c"])
+ ax = df2.plot(legend=False, ax=ax)
+ _check_legend_labels(ax, labels=["a", "b (right)", "c"])
+ ax = df3.plot(kind="bar", legend=True, secondary_y="h", ax=ax)
+ _check_legend_labels(ax, labels=["a", "b (right)", "c", "g", "h (right)", "i"])
+
+ def test_df_legend_labels_time_series(self):
+ # Time Series
+ pytest.importorskip("scipy")
+ ind = date_range("1/1/2014", periods=3)
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((3, 3)),
+ columns=["a", "b", "c"],
+ index=ind,
+ )
+ df2 = DataFrame(
+ np.random.default_rng(2).standard_normal((3, 3)),
+ columns=["d", "e", "f"],
+ index=ind,
+ )
+ df3 = DataFrame(
+ np.random.default_rng(2).standard_normal((3, 3)),
+ columns=["g", "h", "i"],
+ index=ind,
+ )
+ ax = df.plot(legend=True, secondary_y="b")
+ _check_legend_labels(ax, labels=["a", "b (right)", "c"])
+ ax = df2.plot(legend=False, ax=ax)
+ _check_legend_labels(ax, labels=["a", "b (right)", "c"])
+ ax = df3.plot(legend=True, ax=ax)
+ _check_legend_labels(ax, labels=["a", "b (right)", "c", "g", "h", "i"])
+
+ def test_df_legend_labels_time_series_scatter(self):
+ # Time Series
+ pytest.importorskip("scipy")
+ ind = date_range("1/1/2014", periods=3)
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((3, 3)),
+ columns=["a", "b", "c"],
+ index=ind,
+ )
+ df2 = DataFrame(
+ np.random.default_rng(2).standard_normal((3, 3)),
+ columns=["d", "e", "f"],
+ index=ind,
+ )
+ df3 = DataFrame(
+ np.random.default_rng(2).standard_normal((3, 3)),
+ columns=["g", "h", "i"],
+ index=ind,
+ )
+ # scatter
+ ax = df.plot.scatter(x="a", y="b", label="data1")
+ _check_legend_labels(ax, labels=["data1"])
+ ax = df2.plot.scatter(x="d", y="e", legend=False, label="data2", ax=ax)
+ _check_legend_labels(ax, labels=["data1"])
+ ax = df3.plot.scatter(x="g", y="h", label="data3", ax=ax)
+ _check_legend_labels(ax, labels=["data1", "data3"])
+
+ def test_df_legend_labels_time_series_no_mutate(self):
+ pytest.importorskip("scipy")
+ ind = date_range("1/1/2014", periods=3)
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((3, 3)),
+ columns=["a", "b", "c"],
+ index=ind,
+ )
+ # ensure label args pass through and
+ # index name does not mutate
+ # column names don't mutate
+ df5 = df.set_index("a")
+ ax = df5.plot(y="b")
+ _check_legend_labels(ax, labels=["b"])
+ ax = df5.plot(y="b", label="LABEL_b")
+ _check_legend_labels(ax, labels=["LABEL_b"])
+ _check_text_labels(ax.xaxis.get_label(), "a")
+ ax = df5.plot(y="c", label="LABEL_c", ax=ax)
+ _check_legend_labels(ax, labels=["LABEL_b", "LABEL_c"])
+ assert df5.columns.tolist() == ["b", "c"]
+
+ def test_missing_marker_multi_plots_on_same_ax(self):
+ # GH 18222
+ df = DataFrame(data=[[1, 1, 1, 1], [2, 2, 4, 8]], columns=["x", "r", "g", "b"])
+ _, ax = mpl.pyplot.subplots(nrows=1, ncols=3)
+ # Left plot
+ df.plot(x="x", y="r", linewidth=0, marker="o", color="r", ax=ax[0])
+ df.plot(x="x", y="g", linewidth=1, marker="x", color="g", ax=ax[0])
+ df.plot(x="x", y="b", linewidth=1, marker="o", color="b", ax=ax[0])
+ _check_legend_labels(ax[0], labels=["r", "g", "b"])
+ _check_legend_marker(ax[0], expected_markers=["o", "x", "o"])
+ # Center plot
+ df.plot(x="x", y="b", linewidth=1, marker="o", color="b", ax=ax[1])
+ df.plot(x="x", y="r", linewidth=0, marker="o", color="r", ax=ax[1])
+ df.plot(x="x", y="g", linewidth=1, marker="x", color="g", ax=ax[1])
+ _check_legend_labels(ax[1], labels=["b", "r", "g"])
+ _check_legend_marker(ax[1], expected_markers=["o", "o", "x"])
+ # Right plot
+ df.plot(x="x", y="g", linewidth=1, marker="x", color="g", ax=ax[2])
+ df.plot(x="x", y="b", linewidth=1, marker="o", color="b", ax=ax[2])
+ df.plot(x="x", y="r", linewidth=0, marker="o", color="r", ax=ax[2])
+ _check_legend_labels(ax[2], labels=["g", "b", "r"])
+ _check_legend_marker(ax[2], expected_markers=["x", "o", "o"])
+
+ def test_legend_name(self):
+ multi = DataFrame(
+ np.random.default_rng(2).standard_normal((4, 4)),
+ columns=[np.array(["a", "a", "b", "b"]), np.array(["x", "y", "x", "y"])],
+ )
+ multi.columns.names = ["group", "individual"]
+
+ ax = multi.plot()
+ leg_title = ax.legend_.get_title()
+ _check_text_labels(leg_title, "group,individual")
+
+ df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
+ ax = df.plot(legend=True, ax=ax)
+ leg_title = ax.legend_.get_title()
+ _check_text_labels(leg_title, "group,individual")
+
+ df.columns.name = "new"
+ ax = df.plot(legend=False, ax=ax)
+ leg_title = ax.legend_.get_title()
+ _check_text_labels(leg_title, "group,individual")
+
+ ax = df.plot(legend=True, ax=ax)
+ leg_title = ax.legend_.get_title()
+ _check_text_labels(leg_title, "new")
+
+ @pytest.mark.parametrize(
+ "kind",
+ [
+ "line",
+ "bar",
+ "barh",
+ pytest.param("kde", marks=td.skip_if_no("scipy")),
+ "area",
+ "hist",
+ ],
+ )
+ def test_no_legend(self, kind):
+ df = DataFrame(np.random.default_rng(2).random((3, 3)), columns=["a", "b", "c"])
+ ax = df.plot(kind=kind, legend=False)
+ _check_legend_labels(ax, visible=False)
+
+ def test_missing_markers_legend(self):
+ # 14958
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((8, 3)), columns=["A", "B", "C"]
+ )
+ ax = df.plot(y=["A"], marker="x", linestyle="solid")
+ df.plot(y=["B"], marker="o", linestyle="dotted", ax=ax)
+ df.plot(y=["C"], marker="<", linestyle="dotted", ax=ax)
+
+ _check_legend_labels(ax, labels=["A", "B", "C"])
+ _check_legend_marker(ax, expected_markers=["x", "o", "<"])
+
+ def test_missing_markers_legend_using_style(self):
+ # 14563
+ df = DataFrame(
+ {
+ "A": [1, 2, 3, 4, 5, 6],
+ "B": [2, 4, 1, 3, 2, 4],
+ "C": [3, 3, 2, 6, 4, 2],
+ "X": [1, 2, 3, 4, 5, 6],
+ }
+ )
+
+ _, ax = mpl.pyplot.subplots()
+ for kind in "ABC":
+ df.plot("X", kind, label=kind, ax=ax, style=".")
+
+ _check_legend_labels(ax, labels=["A", "B", "C"])
+ _check_legend_marker(ax, expected_markers=[".", ".", "."])
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_subplots.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_subplots.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d8d8fa4cdee38d568d099019e89114fb0cdb4e9
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_frame_subplots.py
@@ -0,0 +1,752 @@
+""" Test cases for DataFrame.plot """
+
+import string
+
+import numpy as np
+import pytest
+
+from pandas.compat import is_platform_linux
+from pandas.compat.numpy import np_version_gte1p24
+
+import pandas as pd
+from pandas import (
+ DataFrame,
+ Series,
+ date_range,
+)
+import pandas._testing as tm
+from pandas.tests.plotting.common import (
+ _check_axes_shape,
+ _check_box_return_type,
+ _check_legend_labels,
+ _check_ticks_props,
+ _check_visible,
+ _flatten_visible,
+)
+
+from pandas.io.formats.printing import pprint_thing
+
+mpl = pytest.importorskip("matplotlib")
+plt = pytest.importorskip("matplotlib.pyplot")
+
+
+class TestDataFramePlotsSubplots:
+ @pytest.mark.slow
+ @pytest.mark.parametrize("kind", ["bar", "barh", "line", "area"])
+ def test_subplots(self, kind):
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+
+ axes = df.plot(kind=kind, subplots=True, sharex=True, legend=True)
+ _check_axes_shape(axes, axes_num=3, layout=(3, 1))
+ assert axes.shape == (3,)
+
+ for ax, column in zip(axes, df.columns):
+ _check_legend_labels(ax, labels=[pprint_thing(column)])
+
+ for ax in axes[:-2]:
+ _check_visible(ax.xaxis) # xaxis must be visible for grid
+ _check_visible(ax.get_xticklabels(), visible=False)
+ if kind != "bar":
+ # change https://github.com/pandas-dev/pandas/issues/26714
+ _check_visible(ax.get_xticklabels(minor=True), visible=False)
+ _check_visible(ax.xaxis.get_label(), visible=False)
+ _check_visible(ax.get_yticklabels())
+
+ _check_visible(axes[-1].xaxis)
+ _check_visible(axes[-1].get_xticklabels())
+ _check_visible(axes[-1].get_xticklabels(minor=True))
+ _check_visible(axes[-1].xaxis.get_label())
+ _check_visible(axes[-1].get_yticklabels())
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("kind", ["bar", "barh", "line", "area"])
+ def test_subplots_no_share_x(self, kind):
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+ axes = df.plot(kind=kind, subplots=True, sharex=False)
+ for ax in axes:
+ _check_visible(ax.xaxis)
+ _check_visible(ax.get_xticklabels())
+ _check_visible(ax.get_xticklabels(minor=True))
+ _check_visible(ax.xaxis.get_label())
+ _check_visible(ax.get_yticklabels())
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("kind", ["bar", "barh", "line", "area"])
+ def test_subplots_no_legend(self, kind):
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+ axes = df.plot(kind=kind, subplots=True, legend=False)
+ for ax in axes:
+ assert ax.get_legend() is None
+
+ @pytest.mark.parametrize("kind", ["line", "area"])
+ def test_subplots_timeseries(self, kind):
+ idx = date_range(start="2014-07-01", freq="ME", periods=10)
+ df = DataFrame(np.random.default_rng(2).random((10, 3)), index=idx)
+
+ axes = df.plot(kind=kind, subplots=True, sharex=True)
+ _check_axes_shape(axes, axes_num=3, layout=(3, 1))
+
+ for ax in axes[:-2]:
+ # GH 7801
+ _check_visible(ax.xaxis) # xaxis must be visible for grid
+ _check_visible(ax.get_xticklabels(), visible=False)
+ _check_visible(ax.get_xticklabels(minor=True), visible=False)
+ _check_visible(ax.xaxis.get_label(), visible=False)
+ _check_visible(ax.get_yticklabels())
+
+ _check_visible(axes[-1].xaxis)
+ _check_visible(axes[-1].get_xticklabels())
+ _check_visible(axes[-1].get_xticklabels(minor=True))
+ _check_visible(axes[-1].xaxis.get_label())
+ _check_visible(axes[-1].get_yticklabels())
+ _check_ticks_props(axes, xrot=0)
+
+ @pytest.mark.parametrize("kind", ["line", "area"])
+ def test_subplots_timeseries_rot(self, kind):
+ idx = date_range(start="2014-07-01", freq="ME", periods=10)
+ df = DataFrame(np.random.default_rng(2).random((10, 3)), index=idx)
+ axes = df.plot(kind=kind, subplots=True, sharex=False, rot=45, fontsize=7)
+ for ax in axes:
+ _check_visible(ax.xaxis)
+ _check_visible(ax.get_xticklabels())
+ _check_visible(ax.get_xticklabels(minor=True))
+ _check_visible(ax.xaxis.get_label())
+ _check_visible(ax.get_yticklabels())
+ _check_ticks_props(ax, xlabelsize=7, xrot=45, ylabelsize=7)
+
+ @pytest.mark.parametrize(
+ "col", ["numeric", "timedelta", "datetime_no_tz", "datetime_all_tz"]
+ )
+ def test_subplots_timeseries_y_axis(self, col):
+ # GH16953
+ data = {
+ "numeric": np.array([1, 2, 5]),
+ "timedelta": [
+ pd.Timedelta(-10, unit="s"),
+ pd.Timedelta(10, unit="m"),
+ pd.Timedelta(10, unit="h"),
+ ],
+ "datetime_no_tz": [
+ pd.to_datetime("2017-08-01 00:00:00"),
+ pd.to_datetime("2017-08-01 02:00:00"),
+ pd.to_datetime("2017-08-02 00:00:00"),
+ ],
+ "datetime_all_tz": [
+ pd.to_datetime("2017-08-01 00:00:00", utc=True),
+ pd.to_datetime("2017-08-01 02:00:00", utc=True),
+ pd.to_datetime("2017-08-02 00:00:00", utc=True),
+ ],
+ "text": ["This", "should", "fail"],
+ }
+ testdata = DataFrame(data)
+
+ ax = testdata.plot(y=col)
+ result = ax.get_lines()[0].get_data()[1]
+ expected = testdata[col].values
+ assert (result == expected).all()
+
+ def test_subplots_timeseries_y_text_error(self):
+ # GH16953
+ data = {
+ "numeric": np.array([1, 2, 5]),
+ "text": ["This", "should", "fail"],
+ }
+ testdata = DataFrame(data)
+ msg = "no numeric data to plot"
+ with pytest.raises(TypeError, match=msg):
+ testdata.plot(y="text")
+
+ @pytest.mark.xfail(reason="not support for period, categorical, datetime_mixed_tz")
+ def test_subplots_timeseries_y_axis_not_supported(self):
+ """
+ This test will fail for:
+ period:
+ since period isn't yet implemented in ``select_dtypes``
+ and because it will need a custom value converter +
+ tick formatter (as was done for x-axis plots)
+
+ categorical:
+ because it will need a custom value converter +
+ tick formatter (also doesn't work for x-axis, as of now)
+
+ datetime_mixed_tz:
+ because of the way how pandas handles ``Series`` of
+ ``datetime`` objects with different timezone,
+ generally converting ``datetime`` objects in a tz-aware
+ form could help with this problem
+ """
+ data = {
+ "numeric": np.array([1, 2, 5]),
+ "period": [
+ pd.Period("2017-08-01 00:00:00", freq="H"),
+ pd.Period("2017-08-01 02:00", freq="H"),
+ pd.Period("2017-08-02 00:00:00", freq="H"),
+ ],
+ "categorical": pd.Categorical(
+ ["c", "b", "a"], categories=["a", "b", "c"], ordered=False
+ ),
+ "datetime_mixed_tz": [
+ pd.to_datetime("2017-08-01 00:00:00", utc=True),
+ pd.to_datetime("2017-08-01 02:00:00"),
+ pd.to_datetime("2017-08-02 00:00:00"),
+ ],
+ }
+ testdata = DataFrame(data)
+ ax_period = testdata.plot(x="numeric", y="period")
+ assert (
+ ax_period.get_lines()[0].get_data()[1] == testdata["period"].values
+ ).all()
+ ax_categorical = testdata.plot(x="numeric", y="categorical")
+ assert (
+ ax_categorical.get_lines()[0].get_data()[1]
+ == testdata["categorical"].values
+ ).all()
+ ax_datetime_mixed_tz = testdata.plot(x="numeric", y="datetime_mixed_tz")
+ assert (
+ ax_datetime_mixed_tz.get_lines()[0].get_data()[1]
+ == testdata["datetime_mixed_tz"].values
+ ).all()
+
+ @pytest.mark.parametrize(
+ "layout, exp_layout",
+ [
+ [(2, 2), (2, 2)],
+ [(-1, 2), (2, 2)],
+ [(2, -1), (2, 2)],
+ [(1, 4), (1, 4)],
+ [(-1, 4), (1, 4)],
+ [(4, -1), (4, 1)],
+ ],
+ )
+ def test_subplots_layout_multi_column(self, layout, exp_layout):
+ # GH 6667
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+
+ axes = df.plot(subplots=True, layout=layout)
+ _check_axes_shape(axes, axes_num=3, layout=exp_layout)
+ assert axes.shape == exp_layout
+
+ def test_subplots_layout_multi_column_error(self):
+ # GH 6667
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+ msg = "Layout of 1x1 must be larger than required size 3"
+
+ with pytest.raises(ValueError, match=msg):
+ df.plot(subplots=True, layout=(1, 1))
+
+ msg = "At least one dimension of layout must be positive"
+ with pytest.raises(ValueError, match=msg):
+ df.plot(subplots=True, layout=(-1, -1))
+
+ @pytest.mark.parametrize(
+ "kwargs, expected_axes_num, expected_layout, expected_shape",
+ [
+ ({}, 1, (1, 1), (1,)),
+ ({"layout": (3, 3)}, 1, (3, 3), (3, 3)),
+ ],
+ )
+ def test_subplots_layout_single_column(
+ self, kwargs, expected_axes_num, expected_layout, expected_shape
+ ):
+ # GH 6667
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 1)),
+ index=list(string.ascii_letters[:10]),
+ )
+ axes = df.plot(subplots=True, **kwargs)
+ _check_axes_shape(
+ axes,
+ axes_num=expected_axes_num,
+ layout=expected_layout,
+ )
+ assert axes.shape == expected_shape
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("idx", [range(5), date_range("1/1/2000", periods=5)])
+ def test_subplots_warnings(self, idx):
+ # GH 9464
+ with tm.assert_produces_warning(None):
+ df = DataFrame(np.random.default_rng(2).standard_normal((5, 4)), index=idx)
+ df.plot(subplots=True, layout=(3, 2))
+
+ def test_subplots_multiple_axes(self):
+ # GH 5353, 6970, GH 7069
+ fig, axes = mpl.pyplot.subplots(2, 3)
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+
+ returned = df.plot(subplots=True, ax=axes[0], sharex=False, sharey=False)
+ _check_axes_shape(returned, axes_num=3, layout=(1, 3))
+ assert returned.shape == (3,)
+ assert returned[0].figure is fig
+ # draw on second row
+ returned = df.plot(subplots=True, ax=axes[1], sharex=False, sharey=False)
+ _check_axes_shape(returned, axes_num=3, layout=(1, 3))
+ assert returned.shape == (3,)
+ assert returned[0].figure is fig
+ _check_axes_shape(axes, axes_num=6, layout=(2, 3))
+
+ def test_subplots_multiple_axes_error(self):
+ # GH 5353, 6970, GH 7069
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 3)),
+ index=list(string.ascii_letters[:10]),
+ )
+ msg = "The number of passed axes must be 3, the same as the output plot"
+ _, axes = mpl.pyplot.subplots(2, 3)
+
+ with pytest.raises(ValueError, match=msg):
+ # pass different number of axes from required
+ df.plot(subplots=True, ax=axes)
+
+ @pytest.mark.parametrize(
+ "layout, exp_layout",
+ [
+ [(2, 1), (2, 2)],
+ [(2, -1), (2, 2)],
+ [(-1, 2), (2, 2)],
+ ],
+ )
+ def test_subplots_multiple_axes_2_dim(self, layout, exp_layout):
+ # GH 5353, 6970, GH 7069
+ # pass 2-dim axes and invalid layout
+ # invalid lauout should not affect to input and return value
+ # (show warning is tested in
+ # TestDataFrameGroupByPlots.test_grouped_box_multiple_axes
+ _, axes = mpl.pyplot.subplots(2, 2)
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 4)),
+ index=list(string.ascii_letters[:10]),
+ )
+ with tm.assert_produces_warning(UserWarning):
+ returned = df.plot(
+ subplots=True, ax=axes, layout=layout, sharex=False, sharey=False
+ )
+ _check_axes_shape(returned, axes_num=4, layout=exp_layout)
+ assert returned.shape == (4,)
+
+ def test_subplots_multiple_axes_single_col(self):
+ # GH 5353, 6970, GH 7069
+ # single column
+ _, axes = mpl.pyplot.subplots(1, 1)
+ df = DataFrame(
+ np.random.default_rng(2).random((10, 1)),
+ index=list(string.ascii_letters[:10]),
+ )
+
+ axes = df.plot(subplots=True, ax=[axes], sharex=False, sharey=False)
+ _check_axes_shape(axes, axes_num=1, layout=(1, 1))
+ assert axes.shape == (1,)
+
+ def test_subplots_ts_share_axes(self):
+ # GH 3964
+ _, axes = mpl.pyplot.subplots(3, 3, sharex=True, sharey=True)
+ mpl.pyplot.subplots_adjust(left=0.05, right=0.95, hspace=0.3, wspace=0.3)
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 9)),
+ index=date_range(start="2014-07-01", freq="ME", periods=10),
+ )
+ for i, ax in enumerate(axes.ravel()):
+ df[i].plot(ax=ax, fontsize=5)
+
+ # Rows other than bottom should not be visible
+ for ax in axes[0:-1].ravel():
+ _check_visible(ax.get_xticklabels(), visible=False)
+
+ # Bottom row should be visible
+ for ax in axes[-1].ravel():
+ _check_visible(ax.get_xticklabels(), visible=True)
+
+ # First column should be visible
+ for ax in axes[[0, 1, 2], [0]].ravel():
+ _check_visible(ax.get_yticklabels(), visible=True)
+
+ # Other columns should not be visible
+ for ax in axes[[0, 1, 2], [1]].ravel():
+ _check_visible(ax.get_yticklabels(), visible=False)
+ for ax in axes[[0, 1, 2], [2]].ravel():
+ _check_visible(ax.get_yticklabels(), visible=False)
+
+ def test_subplots_sharex_axes_existing_axes(self):
+ # GH 9158
+ d = {"A": [1.0, 2.0, 3.0, 4.0], "B": [4.0, 3.0, 2.0, 1.0], "C": [5, 1, 3, 4]}
+ df = DataFrame(d, index=date_range("2014 10 11", "2014 10 14"))
+
+ axes = df[["A", "B"]].plot(subplots=True)
+ df["C"].plot(ax=axes[0], secondary_y=True)
+
+ _check_visible(axes[0].get_xticklabels(), visible=False)
+ _check_visible(axes[1].get_xticklabels(), visible=True)
+ for ax in axes.ravel():
+ _check_visible(ax.get_yticklabels(), visible=True)
+
+ def test_subplots_dup_columns(self):
+ # GH 10962
+ df = DataFrame(np.random.default_rng(2).random((5, 5)), columns=list("aaaaa"))
+ axes = df.plot(subplots=True)
+ for ax in axes:
+ _check_legend_labels(ax, labels=["a"])
+ assert len(ax.lines) == 1
+
+ def test_subplots_dup_columns_secondary_y(self):
+ # GH 10962
+ df = DataFrame(np.random.default_rng(2).random((5, 5)), columns=list("aaaaa"))
+ axes = df.plot(subplots=True, secondary_y="a")
+ for ax in axes:
+ # (right) is only attached when subplots=False
+ _check_legend_labels(ax, labels=["a"])
+ assert len(ax.lines) == 1
+
+ def test_subplots_dup_columns_secondary_y_no_subplot(self):
+ # GH 10962
+ df = DataFrame(np.random.default_rng(2).random((5, 5)), columns=list("aaaaa"))
+ ax = df.plot(secondary_y="a")
+ _check_legend_labels(ax, labels=["a (right)"] * 5)
+ assert len(ax.lines) == 0
+ assert len(ax.right_ax.lines) == 5
+
+ @pytest.mark.xfail(
+ np_version_gte1p24 and is_platform_linux(),
+ reason="Weird rounding problems",
+ strict=False,
+ )
+ def test_bar_log_no_subplots(self):
+ # GH3254, GH3298 matplotlib/matplotlib#1882, #1892
+ # regressions in 1.2.1
+ expected = np.array([0.1, 1.0, 10.0, 100])
+
+ # no subplots
+ df = DataFrame({"A": [3] * 5, "B": list(range(1, 6))}, index=range(5))
+ ax = df.plot.bar(grid=True, log=True)
+ tm.assert_numpy_array_equal(ax.yaxis.get_ticklocs(), expected)
+
+ @pytest.mark.xfail(
+ np_version_gte1p24 and is_platform_linux(),
+ reason="Weird rounding problems",
+ strict=False,
+ )
+ def test_bar_log_subplots(self):
+ expected = np.array([0.1, 1.0, 10.0, 100.0, 1000.0, 1e4])
+
+ ax = DataFrame([Series([200, 300]), Series([300, 500])]).plot.bar(
+ log=True, subplots=True
+ )
+
+ tm.assert_numpy_array_equal(ax[0].yaxis.get_ticklocs(), expected)
+ tm.assert_numpy_array_equal(ax[1].yaxis.get_ticklocs(), expected)
+
+ def test_boxplot_subplots_return_type_default(self, hist_df):
+ df = hist_df
+
+ # normal style: return_type=None
+ result = df.plot.box(subplots=True)
+ assert isinstance(result, Series)
+ _check_box_return_type(
+ result, None, expected_keys=["height", "weight", "category"]
+ )
+
+ @pytest.mark.parametrize("rt", ["dict", "axes", "both"])
+ def test_boxplot_subplots_return_type(self, hist_df, rt):
+ df = hist_df
+ returned = df.plot.box(return_type=rt, subplots=True)
+ _check_box_return_type(
+ returned,
+ rt,
+ expected_keys=["height", "weight", "category"],
+ check_ax_title=False,
+ )
+
+ def test_df_subplots_patterns_minorticks(self):
+ # GH 10657
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 2)),
+ index=date_range("1/1/2000", periods=10),
+ columns=list("AB"),
+ )
+
+ # shared subplots
+ _, axes = plt.subplots(2, 1, sharex=True)
+ axes = df.plot(subplots=True, ax=axes)
+ for ax in axes:
+ assert len(ax.lines) == 1
+ _check_visible(ax.get_yticklabels(), visible=True)
+ # xaxis of 1st ax must be hidden
+ _check_visible(axes[0].get_xticklabels(), visible=False)
+ _check_visible(axes[0].get_xticklabels(minor=True), visible=False)
+ _check_visible(axes[1].get_xticklabels(), visible=True)
+ _check_visible(axes[1].get_xticklabels(minor=True), visible=True)
+
+ def test_df_subplots_patterns_minorticks_1st_ax_hidden(self):
+ # GH 10657
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 2)),
+ index=date_range("1/1/2000", periods=10),
+ columns=list("AB"),
+ )
+ _, axes = plt.subplots(2, 1)
+ with tm.assert_produces_warning(UserWarning):
+ axes = df.plot(subplots=True, ax=axes, sharex=True)
+ for ax in axes:
+ assert len(ax.lines) == 1
+ _check_visible(ax.get_yticklabels(), visible=True)
+ # xaxis of 1st ax must be hidden
+ _check_visible(axes[0].get_xticklabels(), visible=False)
+ _check_visible(axes[0].get_xticklabels(minor=True), visible=False)
+ _check_visible(axes[1].get_xticklabels(), visible=True)
+ _check_visible(axes[1].get_xticklabels(minor=True), visible=True)
+
+ def test_df_subplots_patterns_minorticks_not_shared(self):
+ # GH 10657
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 2)),
+ index=date_range("1/1/2000", periods=10),
+ columns=list("AB"),
+ )
+ # not shared
+ _, axes = plt.subplots(2, 1)
+ axes = df.plot(subplots=True, ax=axes)
+ for ax in axes:
+ assert len(ax.lines) == 1
+ _check_visible(ax.get_yticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(), visible=True)
+ _check_visible(ax.get_xticklabels(minor=True), visible=True)
+
+ def test_subplots_sharex_false(self):
+ # test when sharex is set to False, two plots should have different
+ # labels, GH 25160
+ df = DataFrame(np.random.default_rng(2).random((10, 2)))
+ df.iloc[5:, 1] = np.nan
+ df.iloc[:5, 0] = np.nan
+
+ _, axs = mpl.pyplot.subplots(2, 1)
+ df.plot.line(ax=axs, subplots=True, sharex=False)
+
+ expected_ax1 = np.arange(4.5, 10, 0.5)
+ expected_ax2 = np.arange(-0.5, 5, 0.5)
+
+ tm.assert_numpy_array_equal(axs[0].get_xticks(), expected_ax1)
+ tm.assert_numpy_array_equal(axs[1].get_xticks(), expected_ax2)
+
+ def test_subplots_constrained_layout(self):
+ # GH 25261
+ idx = date_range(start="now", periods=10)
+ df = DataFrame(np.random.default_rng(2).random((10, 3)), index=idx)
+ kwargs = {}
+ if hasattr(mpl.pyplot.Figure, "get_constrained_layout"):
+ kwargs["constrained_layout"] = True
+ _, axes = mpl.pyplot.subplots(2, **kwargs)
+ with tm.assert_produces_warning(None):
+ df.plot(ax=axes[0])
+ with tm.ensure_clean(return_filelike=True) as path:
+ mpl.pyplot.savefig(path)
+
+ @pytest.mark.parametrize(
+ "index_name, old_label, new_label",
+ [
+ (None, "", "new"),
+ ("old", "old", "new"),
+ (None, "", ""),
+ (None, "", 1),
+ (None, "", [1, 2]),
+ ],
+ )
+ @pytest.mark.parametrize("kind", ["line", "area", "bar"])
+ def test_xlabel_ylabel_dataframe_subplots(
+ self, kind, index_name, old_label, new_label
+ ):
+ # GH 9093
+ df = DataFrame([[1, 2], [2, 5]], columns=["Type A", "Type B"])
+ df.index.name = index_name
+
+ # default is the ylabel is not shown and xlabel is index name
+ axes = df.plot(kind=kind, subplots=True)
+ assert all(ax.get_ylabel() == "" for ax in axes)
+ assert all(ax.get_xlabel() == old_label for ax in axes)
+
+ # old xlabel will be overridden and assigned ylabel will be used as ylabel
+ axes = df.plot(kind=kind, ylabel=new_label, xlabel=new_label, subplots=True)
+ assert all(ax.get_ylabel() == str(new_label) for ax in axes)
+ assert all(ax.get_xlabel() == str(new_label) for ax in axes)
+
+ @pytest.mark.parametrize(
+ "kwargs",
+ [
+ # stacked center
+ {"kind": "bar", "stacked": True},
+ {"kind": "bar", "stacked": True, "width": 0.9},
+ {"kind": "barh", "stacked": True},
+ {"kind": "barh", "stacked": True, "width": 0.9},
+ # center
+ {"kind": "bar", "stacked": False},
+ {"kind": "bar", "stacked": False, "width": 0.9},
+ {"kind": "barh", "stacked": False},
+ {"kind": "barh", "stacked": False, "width": 0.9},
+ # subplots center
+ {"kind": "bar", "subplots": True},
+ {"kind": "bar", "subplots": True, "width": 0.9},
+ {"kind": "barh", "subplots": True},
+ {"kind": "barh", "subplots": True, "width": 0.9},
+ # align edge
+ {"kind": "bar", "stacked": True, "align": "edge"},
+ {"kind": "bar", "stacked": True, "width": 0.9, "align": "edge"},
+ {"kind": "barh", "stacked": True, "align": "edge"},
+ {"kind": "barh", "stacked": True, "width": 0.9, "align": "edge"},
+ {"kind": "bar", "stacked": False, "align": "edge"},
+ {"kind": "bar", "stacked": False, "width": 0.9, "align": "edge"},
+ {"kind": "barh", "stacked": False, "align": "edge"},
+ {"kind": "barh", "stacked": False, "width": 0.9, "align": "edge"},
+ {"kind": "bar", "subplots": True, "align": "edge"},
+ {"kind": "bar", "subplots": True, "width": 0.9, "align": "edge"},
+ {"kind": "barh", "subplots": True, "align": "edge"},
+ {"kind": "barh", "subplots": True, "width": 0.9, "align": "edge"},
+ ],
+ )
+ def test_bar_align_multiple_columns(self, kwargs):
+ # GH2157
+ df = DataFrame({"A": [3] * 5, "B": list(range(5))}, index=range(5))
+ self._check_bar_alignment(df, **kwargs)
+
+ @pytest.mark.parametrize(
+ "kwargs",
+ [
+ {"kind": "bar", "stacked": False},
+ {"kind": "bar", "stacked": True},
+ {"kind": "barh", "stacked": False},
+ {"kind": "barh", "stacked": True},
+ {"kind": "bar", "subplots": True},
+ {"kind": "barh", "subplots": True},
+ ],
+ )
+ def test_bar_align_single_column(self, kwargs):
+ df = DataFrame(np.random.default_rng(2).standard_normal(5))
+ self._check_bar_alignment(df, **kwargs)
+
+ @pytest.mark.parametrize(
+ "kwargs",
+ [
+ {"kind": "bar", "stacked": False},
+ {"kind": "bar", "stacked": True},
+ {"kind": "barh", "stacked": False},
+ {"kind": "barh", "stacked": True},
+ {"kind": "bar", "subplots": True},
+ {"kind": "barh", "subplots": True},
+ ],
+ )
+ def test_bar_barwidth_position(self, kwargs):
+ df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
+ self._check_bar_alignment(df, width=0.9, position=0.2, **kwargs)
+
+ @pytest.mark.parametrize("w", [1, 1.0])
+ def test_bar_barwidth_position_int(self, w):
+ # GH 12979
+ df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
+ ax = df.plot.bar(stacked=True, width=w)
+ ticks = ax.xaxis.get_ticklocs()
+ tm.assert_numpy_array_equal(ticks, np.array([0, 1, 2, 3, 4]))
+ assert ax.get_xlim() == (-0.75, 4.75)
+ # check left-edge of bars
+ assert ax.patches[0].get_x() == -0.5
+ assert ax.patches[-1].get_x() == 3.5
+
+ @pytest.mark.parametrize(
+ "kind, kwargs",
+ [
+ ["bar", {"stacked": True}],
+ ["barh", {"stacked": False}],
+ ["barh", {"stacked": True}],
+ ["bar", {"subplots": True}],
+ ["barh", {"subplots": True}],
+ ],
+ )
+ def test_bar_barwidth_position_int_width_1(self, kind, kwargs):
+ # GH 12979
+ df = DataFrame(np.random.default_rng(2).standard_normal((5, 5)))
+ self._check_bar_alignment(df, kind=kind, width=1, **kwargs)
+
+ def _check_bar_alignment(
+ self,
+ df,
+ kind="bar",
+ stacked=False,
+ subplots=False,
+ align="center",
+ width=0.5,
+ position=0.5,
+ ):
+ axes = df.plot(
+ kind=kind,
+ stacked=stacked,
+ subplots=subplots,
+ align=align,
+ width=width,
+ position=position,
+ grid=True,
+ )
+
+ axes = _flatten_visible(axes)
+
+ for ax in axes:
+ if kind == "bar":
+ axis = ax.xaxis
+ ax_min, ax_max = ax.get_xlim()
+ min_edge = min(p.get_x() for p in ax.patches)
+ max_edge = max(p.get_x() + p.get_width() for p in ax.patches)
+ elif kind == "barh":
+ axis = ax.yaxis
+ ax_min, ax_max = ax.get_ylim()
+ min_edge = min(p.get_y() for p in ax.patches)
+ max_edge = max(p.get_y() + p.get_height() for p in ax.patches)
+ else:
+ raise ValueError
+
+ # GH 7498
+ # compare margins between lim and bar edges
+ tm.assert_almost_equal(ax_min, min_edge - 0.25)
+ tm.assert_almost_equal(ax_max, max_edge + 0.25)
+
+ p = ax.patches[0]
+ if kind == "bar" and (stacked is True or subplots is True):
+ edge = p.get_x()
+ center = edge + p.get_width() * position
+ elif kind == "bar" and stacked is False:
+ center = p.get_x() + p.get_width() * len(df.columns) * position
+ edge = p.get_x()
+ elif kind == "barh" and (stacked is True or subplots is True):
+ center = p.get_y() + p.get_height() * position
+ edge = p.get_y()
+ elif kind == "barh" and stacked is False:
+ center = p.get_y() + p.get_height() * len(df.columns) * position
+ edge = p.get_y()
+ else:
+ raise ValueError
+
+ # Check the ticks locates on integer
+ assert (axis.get_ticklocs() == np.arange(len(df))).all()
+
+ if align == "center":
+ # Check whether the bar locates on center
+ tm.assert_almost_equal(axis.get_ticklocs()[0], center)
+ elif align == "edge":
+ # Check whether the bar's edge starts from the tick
+ tm.assert_almost_equal(axis.get_ticklocs()[0], edge)
+ else:
+ raise ValueError
+
+ return axes
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_hist_box_by.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_hist_box_by.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9250fa8347cc04fa34c28b016e1fb27d837284f
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/frame/test_hist_box_by.py
@@ -0,0 +1,342 @@
+import re
+
+import numpy as np
+import pytest
+
+from pandas import DataFrame
+import pandas._testing as tm
+from pandas.tests.plotting.common import (
+ _check_axes_shape,
+ _check_plot_works,
+ get_x_axis,
+ get_y_axis,
+)
+
+pytest.importorskip("matplotlib")
+
+
+@pytest.fixture
+def hist_df():
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((30, 2)), columns=["A", "B"]
+ )
+ df["C"] = np.random.default_rng(2).choice(["a", "b", "c"], 30)
+ df["D"] = np.random.default_rng(2).choice(["a", "b", "c"], 30)
+ return df
+
+
+class TestHistWithBy:
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "by, column, titles, legends",
+ [
+ ("C", "A", ["a", "b", "c"], [["A"]] * 3),
+ ("C", ["A", "B"], ["a", "b", "c"], [["A", "B"]] * 3),
+ ("C", None, ["a", "b", "c"], [["A", "B"]] * 3),
+ (
+ ["C", "D"],
+ "A",
+ [
+ "(a, a)",
+ "(b, b)",
+ "(c, c)",
+ ],
+ [["A"]] * 3,
+ ),
+ (
+ ["C", "D"],
+ ["A", "B"],
+ [
+ "(a, a)",
+ "(b, b)",
+ "(c, c)",
+ ],
+ [["A", "B"]] * 3,
+ ),
+ (
+ ["C", "D"],
+ None,
+ [
+ "(a, a)",
+ "(b, b)",
+ "(c, c)",
+ ],
+ [["A", "B"]] * 3,
+ ),
+ ],
+ )
+ def test_hist_plot_by_argument(self, by, column, titles, legends, hist_df):
+ # GH 15079
+ axes = _check_plot_works(
+ hist_df.plot.hist, column=column, by=by, default_axes=True
+ )
+ result_titles = [ax.get_title() for ax in axes]
+ result_legends = [
+ [legend.get_text() for legend in ax.get_legend().texts] for ax in axes
+ ]
+
+ assert result_legends == legends
+ assert result_titles == titles
+
+ @pytest.mark.parametrize(
+ "by, column, titles, legends",
+ [
+ (0, "A", ["a", "b", "c"], [["A"]] * 3),
+ (0, None, ["a", "b", "c"], [["A", "B"]] * 3),
+ (
+ [0, "D"],
+ "A",
+ [
+ "(a, a)",
+ "(b, b)",
+ "(c, c)",
+ ],
+ [["A"]] * 3,
+ ),
+ ],
+ )
+ def test_hist_plot_by_0(self, by, column, titles, legends, hist_df):
+ # GH 15079
+ df = hist_df.copy()
+ df = df.rename(columns={"C": 0})
+
+ axes = _check_plot_works(df.plot.hist, default_axes=True, column=column, by=by)
+ result_titles = [ax.get_title() for ax in axes]
+ result_legends = [
+ [legend.get_text() for legend in ax.get_legend().texts] for ax in axes
+ ]
+
+ assert result_legends == legends
+ assert result_titles == titles
+
+ @pytest.mark.parametrize(
+ "by, column",
+ [
+ ([], ["A"]),
+ ([], ["A", "B"]),
+ ((), None),
+ ((), ["A", "B"]),
+ ],
+ )
+ def test_hist_plot_empty_list_string_tuple_by(self, by, column, hist_df):
+ # GH 15079
+ msg = "No group keys passed"
+ with pytest.raises(ValueError, match=msg):
+ _check_plot_works(
+ hist_df.plot.hist, default_axes=True, column=column, by=by
+ )
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "by, column, layout, axes_num",
+ [
+ (["C"], "A", (2, 2), 3),
+ ("C", "A", (2, 2), 3),
+ (["C"], ["A"], (1, 3), 3),
+ ("C", None, (3, 1), 3),
+ ("C", ["A", "B"], (3, 1), 3),
+ (["C", "D"], "A", (9, 1), 3),
+ (["C", "D"], "A", (3, 3), 3),
+ (["C", "D"], ["A"], (5, 2), 3),
+ (["C", "D"], ["A", "B"], (9, 1), 3),
+ (["C", "D"], None, (9, 1), 3),
+ (["C", "D"], ["A", "B"], (5, 2), 3),
+ ],
+ )
+ def test_hist_plot_layout_with_by(self, by, column, layout, axes_num, hist_df):
+ # GH 15079
+ # _check_plot_works adds an ax so catch warning. see GH #13188
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ axes = _check_plot_works(
+ hist_df.plot.hist, column=column, by=by, layout=layout
+ )
+ _check_axes_shape(axes, axes_num=axes_num, layout=layout)
+
+ @pytest.mark.parametrize(
+ "msg, by, layout",
+ [
+ ("larger than required size", ["C", "D"], (1, 1)),
+ (re.escape("Layout must be a tuple of (rows, columns)"), "C", (1,)),
+ ("At least one dimension of layout must be positive", "C", (-1, -1)),
+ ],
+ )
+ def test_hist_plot_invalid_layout_with_by_raises(self, msg, by, layout, hist_df):
+ # GH 15079, test if error is raised when invalid layout is given
+
+ with pytest.raises(ValueError, match=msg):
+ hist_df.plot.hist(column=["A", "B"], by=by, layout=layout)
+
+ @pytest.mark.slow
+ def test_axis_share_x_with_by(self, hist_df):
+ # GH 15079
+ ax1, ax2, ax3 = hist_df.plot.hist(column="A", by="C", sharex=True)
+
+ # share x
+ assert get_x_axis(ax1).joined(ax1, ax2)
+ assert get_x_axis(ax2).joined(ax1, ax2)
+ assert get_x_axis(ax3).joined(ax1, ax3)
+ assert get_x_axis(ax3).joined(ax2, ax3)
+
+ # don't share y
+ assert not get_y_axis(ax1).joined(ax1, ax2)
+ assert not get_y_axis(ax2).joined(ax1, ax2)
+ assert not get_y_axis(ax3).joined(ax1, ax3)
+ assert not get_y_axis(ax3).joined(ax2, ax3)
+
+ @pytest.mark.slow
+ def test_axis_share_y_with_by(self, hist_df):
+ # GH 15079
+ ax1, ax2, ax3 = hist_df.plot.hist(column="A", by="C", sharey=True)
+
+ # share y
+ assert get_y_axis(ax1).joined(ax1, ax2)
+ assert get_y_axis(ax2).joined(ax1, ax2)
+ assert get_y_axis(ax3).joined(ax1, ax3)
+ assert get_y_axis(ax3).joined(ax2, ax3)
+
+ # don't share x
+ assert not get_x_axis(ax1).joined(ax1, ax2)
+ assert not get_x_axis(ax2).joined(ax1, ax2)
+ assert not get_x_axis(ax3).joined(ax1, ax3)
+ assert not get_x_axis(ax3).joined(ax2, ax3)
+
+ @pytest.mark.parametrize("figsize", [(12, 8), (20, 10)])
+ def test_figure_shape_hist_with_by(self, figsize, hist_df):
+ # GH 15079
+ axes = hist_df.plot.hist(column="A", by="C", figsize=figsize)
+ _check_axes_shape(axes, axes_num=3, figsize=figsize)
+
+
+class TestBoxWithBy:
+ @pytest.mark.parametrize(
+ "by, column, titles, xticklabels",
+ [
+ ("C", "A", ["A"], [["a", "b", "c"]]),
+ (
+ ["C", "D"],
+ "A",
+ ["A"],
+ [
+ [
+ "(a, a)",
+ "(b, b)",
+ "(c, c)",
+ ]
+ ],
+ ),
+ ("C", ["A", "B"], ["A", "B"], [["a", "b", "c"]] * 2),
+ (
+ ["C", "D"],
+ ["A", "B"],
+ ["A", "B"],
+ [
+ [
+ "(a, a)",
+ "(b, b)",
+ "(c, c)",
+ ]
+ ]
+ * 2,
+ ),
+ (["C"], None, ["A", "B"], [["a", "b", "c"]] * 2),
+ ],
+ )
+ def test_box_plot_by_argument(self, by, column, titles, xticklabels, hist_df):
+ # GH 15079
+ axes = _check_plot_works(
+ hist_df.plot.box, default_axes=True, column=column, by=by
+ )
+ result_titles = [ax.get_title() for ax in axes]
+ result_xticklabels = [
+ [label.get_text() for label in ax.get_xticklabels()] for ax in axes
+ ]
+
+ assert result_xticklabels == xticklabels
+ assert result_titles == titles
+
+ @pytest.mark.parametrize(
+ "by, column, titles, xticklabels",
+ [
+ (0, "A", ["A"], [["a", "b", "c"]]),
+ (
+ [0, "D"],
+ "A",
+ ["A"],
+ [
+ [
+ "(a, a)",
+ "(b, b)",
+ "(c, c)",
+ ]
+ ],
+ ),
+ (0, None, ["A", "B"], [["a", "b", "c"]] * 2),
+ ],
+ )
+ def test_box_plot_by_0(self, by, column, titles, xticklabels, hist_df):
+ # GH 15079
+ df = hist_df.copy()
+ df = df.rename(columns={"C": 0})
+
+ axes = _check_plot_works(df.plot.box, default_axes=True, column=column, by=by)
+ result_titles = [ax.get_title() for ax in axes]
+ result_xticklabels = [
+ [label.get_text() for label in ax.get_xticklabels()] for ax in axes
+ ]
+
+ assert result_xticklabels == xticklabels
+ assert result_titles == titles
+
+ @pytest.mark.parametrize(
+ "by, column",
+ [
+ ([], ["A"]),
+ ((), "A"),
+ ([], None),
+ ((), ["A", "B"]),
+ ],
+ )
+ def test_box_plot_with_none_empty_list_by(self, by, column, hist_df):
+ # GH 15079
+ msg = "No group keys passed"
+ with pytest.raises(ValueError, match=msg):
+ _check_plot_works(hist_df.plot.box, default_axes=True, column=column, by=by)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "by, column, layout, axes_num",
+ [
+ (["C"], "A", (1, 1), 1),
+ ("C", "A", (1, 1), 1),
+ ("C", None, (2, 1), 2),
+ ("C", ["A", "B"], (1, 2), 2),
+ (["C", "D"], "A", (1, 1), 1),
+ (["C", "D"], None, (1, 2), 2),
+ ],
+ )
+ def test_box_plot_layout_with_by(self, by, column, layout, axes_num, hist_df):
+ # GH 15079
+ axes = _check_plot_works(
+ hist_df.plot.box, default_axes=True, column=column, by=by, layout=layout
+ )
+ _check_axes_shape(axes, axes_num=axes_num, layout=layout)
+
+ @pytest.mark.parametrize(
+ "msg, by, layout",
+ [
+ ("larger than required size", ["C", "D"], (1, 1)),
+ (re.escape("Layout must be a tuple of (rows, columns)"), "C", (1,)),
+ ("At least one dimension of layout must be positive", "C", (-1, -1)),
+ ],
+ )
+ def test_box_plot_invalid_layout_with_by_raises(self, msg, by, layout, hist_df):
+ # GH 15079, test if error is raised when invalid layout is given
+
+ with pytest.raises(ValueError, match=msg):
+ hist_df.plot.box(column=["A", "B"], by=by, layout=layout)
+
+ @pytest.mark.parametrize("figsize", [(12, 8), (20, 10)])
+ def test_figure_shape_hist_with_by(self, figsize, hist_df):
+ # GH 15079
+ axes = hist_df.plot.box(column="A", by="C", figsize=figsize)
+ _check_axes_shape(axes, axes_num=1, figsize=figsize)
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/test_converter.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/test_converter.py
new file mode 100644
index 0000000000000000000000000000000000000000..f748d7c5fc758045fc5d3475b94e376a06f5269b
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/test_converter.py
@@ -0,0 +1,410 @@
+from datetime import (
+ date,
+ datetime,
+)
+import subprocess
+import sys
+
+import numpy as np
+import pytest
+
+import pandas._config.config as cf
+
+from pandas._libs.tslibs import to_offset
+
+from pandas import (
+ Index,
+ Period,
+ PeriodIndex,
+ Series,
+ Timestamp,
+ arrays,
+ date_range,
+)
+import pandas._testing as tm
+
+from pandas.plotting import (
+ deregister_matplotlib_converters,
+ register_matplotlib_converters,
+)
+from pandas.tseries.offsets import (
+ Day,
+ Micro,
+ Milli,
+ Second,
+)
+
+try:
+ from pandas.plotting._matplotlib import converter
+except ImportError:
+ # try / except, rather than skip, to avoid internal refactoring
+ # causing an improper skip
+ pass
+
+pytest.importorskip("matplotlib.pyplot")
+dates = pytest.importorskip("matplotlib.dates")
+
+
+@pytest.mark.single_cpu
+def test_registry_mpl_resets():
+ # Check that Matplotlib converters are properly reset (see issue #27481)
+ code = (
+ "import matplotlib.units as units; "
+ "import matplotlib.dates as mdates; "
+ "n_conv = len(units.registry); "
+ "import pandas as pd; "
+ "pd.plotting.register_matplotlib_converters(); "
+ "pd.plotting.deregister_matplotlib_converters(); "
+ "assert len(units.registry) == n_conv"
+ )
+ call = [sys.executable, "-c", code]
+ subprocess.check_output(call)
+
+
+def test_timtetonum_accepts_unicode():
+ assert converter.time2num("00:01") == converter.time2num("00:01")
+
+
+class TestRegistration:
+ @pytest.mark.single_cpu
+ def test_dont_register_by_default(self):
+ # Run in subprocess to ensure a clean state
+ code = (
+ "import matplotlib.units; "
+ "import pandas as pd; "
+ "units = dict(matplotlib.units.registry); "
+ "assert pd.Timestamp not in units"
+ )
+ call = [sys.executable, "-c", code]
+ assert subprocess.check_call(call) == 0
+
+ def test_registering_no_warning(self):
+ plt = pytest.importorskip("matplotlib.pyplot")
+ s = Series(range(12), index=date_range("2017", periods=12))
+ _, ax = plt.subplots()
+
+ # Set to the "warn" state, in case this isn't the first test run
+ register_matplotlib_converters()
+ ax.plot(s.index, s.values)
+ plt.close()
+
+ def test_pandas_plots_register(self):
+ plt = pytest.importorskip("matplotlib.pyplot")
+ s = Series(range(12), index=date_range("2017", periods=12))
+ # Set to the "warn" state, in case this isn't the first test run
+ with tm.assert_produces_warning(None) as w:
+ s.plot()
+
+ try:
+ assert len(w) == 0
+ finally:
+ plt.close()
+
+ def test_matplotlib_formatters(self):
+ units = pytest.importorskip("matplotlib.units")
+
+ # Can't make any assertion about the start state.
+        # We check that toggling converters off removes it, and toggling it
+ # on restores it.
+
+ with cf.option_context("plotting.matplotlib.register_converters", True):
+ with cf.option_context("plotting.matplotlib.register_converters", False):
+ assert Timestamp not in units.registry
+ assert Timestamp in units.registry
+
+ def test_option_no_warning(self):
+ pytest.importorskip("matplotlib.pyplot")
+ ctx = cf.option_context("plotting.matplotlib.register_converters", False)
+ plt = pytest.importorskip("matplotlib.pyplot")
+ s = Series(range(12), index=date_range("2017", periods=12))
+ _, ax = plt.subplots()
+
+ # Test without registering first, no warning
+ with ctx:
+ ax.plot(s.index, s.values)
+
+ # Now test with registering
+ register_matplotlib_converters()
+ with ctx:
+ ax.plot(s.index, s.values)
+ plt.close()
+
+ def test_registry_resets(self):
+ units = pytest.importorskip("matplotlib.units")
+ dates = pytest.importorskip("matplotlib.dates")
+
+ # make a copy, to reset to
+ original = dict(units.registry)
+
+ try:
+ # get to a known state
+ units.registry.clear()
+ date_converter = dates.DateConverter()
+ units.registry[datetime] = date_converter
+ units.registry[date] = date_converter
+
+ register_matplotlib_converters()
+ assert units.registry[date] is not date_converter
+ deregister_matplotlib_converters()
+ assert units.registry[date] is date_converter
+
+ finally:
+            # restore original state
+ units.registry.clear()
+ for k, v in original.items():
+ units.registry[k] = v
+
+
+class TestDateTimeConverter:
+ @pytest.fixture
+ def dtc(self):
+ return converter.DatetimeConverter()
+
+ def test_convert_accepts_unicode(self, dtc):
+ r1 = dtc.convert("2000-01-01 12:22", None, None)
+ r2 = dtc.convert("2000-01-01 12:22", None, None)
+ assert r1 == r2, "DatetimeConverter.convert should accept unicode"
+
+ def test_conversion(self, dtc):
+ rs = dtc.convert(["2012-1-1"], None, None)[0]
+ xp = dates.date2num(datetime(2012, 1, 1))
+ assert rs == xp
+
+ rs = dtc.convert("2012-1-1", None, None)
+ assert rs == xp
+
+ rs = dtc.convert(date(2012, 1, 1), None, None)
+ assert rs == xp
+
+ rs = dtc.convert("2012-1-1", None, None)
+ assert rs == xp
+
+ rs = dtc.convert(Timestamp("2012-1-1"), None, None)
+ assert rs == xp
+
+ # also testing datetime64 dtype (GH8614)
+ rs = dtc.convert("2012-01-01", None, None)
+ assert rs == xp
+
+ rs = dtc.convert("2012-01-01 00:00:00+0000", None, None)
+ assert rs == xp
+
+ rs = dtc.convert(
+ np.array(["2012-01-01 00:00:00+0000", "2012-01-02 00:00:00+0000"]),
+ None,
+ None,
+ )
+ assert rs[0] == xp
+
+        # we have a tz-aware date (constructed so that when we turn to utc it
+ # is the same as our sample)
+ ts = Timestamp("2012-01-01").tz_localize("UTC").tz_convert("US/Eastern")
+ rs = dtc.convert(ts, None, None)
+ assert rs == xp
+
+ rs = dtc.convert(ts.to_pydatetime(), None, None)
+ assert rs == xp
+
+ rs = dtc.convert(Index([ts - Day(1), ts]), None, None)
+ assert rs[1] == xp
+
+ rs = dtc.convert(Index([ts - Day(1), ts]).to_pydatetime(), None, None)
+ assert rs[1] == xp
+
+ def test_conversion_float(self, dtc):
+ rtol = 0.5 * 10**-9
+
+ rs = dtc.convert(Timestamp("2012-1-1 01:02:03", tz="UTC"), None, None)
+ xp = converter.mdates.date2num(Timestamp("2012-1-1 01:02:03", tz="UTC"))
+ tm.assert_almost_equal(rs, xp, rtol=rtol)
+
+ rs = dtc.convert(
+ Timestamp("2012-1-1 09:02:03", tz="Asia/Hong_Kong"), None, None
+ )
+ tm.assert_almost_equal(rs, xp, rtol=rtol)
+
+ rs = dtc.convert(datetime(2012, 1, 1, 1, 2, 3), None, None)
+ tm.assert_almost_equal(rs, xp, rtol=rtol)
+
+ @pytest.mark.parametrize(
+ "values",
+ [
+ [date(1677, 1, 1), date(1677, 1, 2)],
+ [datetime(1677, 1, 1, 12), datetime(1677, 1, 2, 12)],
+ ],
+ )
+ def test_conversion_outofbounds_datetime(self, dtc, values):
+ # 2579
+ rs = dtc.convert(values, None, None)
+ xp = converter.mdates.date2num(values)
+ tm.assert_numpy_array_equal(rs, xp)
+ rs = dtc.convert(values[0], None, None)
+ xp = converter.mdates.date2num(values[0])
+ assert rs == xp
+
+ @pytest.mark.parametrize(
+ "time,format_expected",
+ [
+ (0, "00:00"), # time2num(datetime.time.min)
+ (86399.999999, "23:59:59.999999"), # time2num(datetime.time.max)
+ (90000, "01:00"),
+ (3723, "01:02:03"),
+ (39723.2, "11:02:03.200"),
+ ],
+ )
+ def test_time_formatter(self, time, format_expected):
+ # issue 18478
+ result = converter.TimeFormatter(None)(time)
+ assert result == format_expected
+
+ @pytest.mark.parametrize("freq", ("B", "ms", "s"))
+ def test_dateindex_conversion(self, freq, dtc):
+ rtol = 10**-9
+ dateindex = date_range("2020-01-01", periods=10, freq=freq)
+ rs = dtc.convert(dateindex, None, None)
+ xp = converter.mdates.date2num(dateindex._mpl_repr())
+ tm.assert_almost_equal(rs, xp, rtol=rtol)
+
+ @pytest.mark.parametrize("offset", [Second(), Milli(), Micro(50)])
+ def test_resolution(self, offset, dtc):
+ # Matplotlib's time representation using floats cannot distinguish
+ # intervals smaller than ~10 microsecond in the common range of years.
+ ts1 = Timestamp("2012-1-1")
+ ts2 = ts1 + offset
+ val1 = dtc.convert(ts1, None, None)
+ val2 = dtc.convert(ts2, None, None)
+ if not val1 < val2:
+ raise AssertionError(f"{val1} is not less than {val2}.")
+
+ def test_convert_nested(self, dtc):
+ inner = [Timestamp("2017-01-01"), Timestamp("2017-01-02")]
+ data = [inner, inner]
+ result = dtc.convert(data, None, None)
+ expected = [dtc.convert(x, None, None) for x in data]
+ assert (np.array(result) == expected).all()
+
+
+class TestPeriodConverter:
+ @pytest.fixture
+ def pc(self):
+ return converter.PeriodConverter()
+
+ @pytest.fixture
+ def axis(self):
+ class Axis:
+ pass
+
+ axis = Axis()
+ axis.freq = "D"
+ return axis
+
+ def test_convert_accepts_unicode(self, pc, axis):
+ r1 = pc.convert("2012-1-1", None, axis)
+ r2 = pc.convert("2012-1-1", None, axis)
+ assert r1 == r2
+
+ def test_conversion(self, pc, axis):
+ rs = pc.convert(["2012-1-1"], None, axis)[0]
+ xp = Period("2012-1-1").ordinal
+ assert rs == xp
+
+ rs = pc.convert("2012-1-1", None, axis)
+ assert rs == xp
+
+ rs = pc.convert([date(2012, 1, 1)], None, axis)[0]
+ assert rs == xp
+
+ rs = pc.convert(date(2012, 1, 1), None, axis)
+ assert rs == xp
+
+ rs = pc.convert([Timestamp("2012-1-1")], None, axis)[0]
+ assert rs == xp
+
+ rs = pc.convert(Timestamp("2012-1-1"), None, axis)
+ assert rs == xp
+
+ rs = pc.convert("2012-01-01", None, axis)
+ assert rs == xp
+
+ rs = pc.convert("2012-01-01 00:00:00+0000", None, axis)
+ assert rs == xp
+
+ rs = pc.convert(
+ np.array(
+ ["2012-01-01 00:00:00", "2012-01-02 00:00:00"],
+ dtype="datetime64[ns]",
+ ),
+ None,
+ axis,
+ )
+ assert rs[0] == xp
+
+ def test_integer_passthrough(self, pc, axis):
+ # GH9012
+ rs = pc.convert([0, 1], None, axis)
+ xp = [0, 1]
+ assert rs == xp
+
+ def test_convert_nested(self, pc, axis):
+ data = ["2012-1-1", "2012-1-2"]
+ r1 = pc.convert([data, data], None, axis)
+ r2 = [pc.convert(data, None, axis) for _ in range(2)]
+ assert r1 == r2
+
+
+class TestTimeDeltaConverter:
+ """Test timedelta converter"""
+
+ @pytest.mark.parametrize(
+ "x, decimal, format_expected",
+ [
+ (0.0, 0, "00:00:00"),
+ (3972320000000, 1, "01:06:12.3"),
+ (713233432000000, 2, "8 days 06:07:13.43"),
+ (32423432000000, 4, "09:00:23.4320"),
+ ],
+ )
+ def test_format_timedelta_ticks(self, x, decimal, format_expected):
+ tdc = converter.TimeSeries_TimedeltaFormatter
+ result = tdc.format_timedelta_ticks(x, pos=None, n_decimals=decimal)
+ assert result == format_expected
+
+ @pytest.mark.parametrize("view_interval", [(1, 2), (2, 1)])
+ def test_call_w_different_view_intervals(self, view_interval, monkeypatch):
+ # previously broke on reversed xlmits; see GH37454
+ class mock_axis:
+ def get_view_interval(self):
+ return view_interval
+
+ tdc = converter.TimeSeries_TimedeltaFormatter()
+ monkeypatch.setattr(tdc, "axis", mock_axis())
+ tdc(0.0, 0)
+
+
+@pytest.mark.parametrize("year_span", [11.25, 30, 80, 150, 400, 800, 1500, 2500, 3500])
+# The range is limited to 11.25 at the bottom by if statements in
+# the _quarterly_finder() function
+def test_quarterly_finder(year_span):
+ vmin = -1000
+ vmax = vmin + year_span * 4
+ span = vmax - vmin + 1
+ if span < 45:
+ pytest.skip("the quarterly finder is only invoked if the span is >= 45")
+ nyears = span / 4
+ (min_anndef, maj_anndef) = converter._get_default_annual_spacing(nyears)
+ result = converter._quarterly_finder(vmin, vmax, to_offset("QE"))
+ quarters = PeriodIndex(
+ arrays.PeriodArray(np.array([x[0] for x in result]), dtype="period[Q]")
+ )
+ majors = np.array([x[1] for x in result])
+ minors = np.array([x[2] for x in result])
+ major_quarters = quarters[majors]
+ minor_quarters = quarters[minors]
+ check_major_years = major_quarters.year % maj_anndef == 0
+ check_minor_years = minor_quarters.year % min_anndef == 0
+ check_major_quarters = major_quarters.quarter == 1
+ check_minor_quarters = minor_quarters.quarter == 1
+ assert np.all(check_major_years)
+ assert np.all(check_minor_years)
+ assert np.all(check_major_quarters)
+ assert np.all(check_minor_quarters)
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/test_hist_method.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/test_hist_method.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d17f87fdc7bc1456a118c84b76c631544572fd4
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/test_hist_method.py
@@ -0,0 +1,971 @@
+""" Test cases for .hist method """
+import re
+
+import numpy as np
+import pytest
+
+from pandas import (
+ DataFrame,
+ Index,
+ Series,
+ date_range,
+ to_datetime,
+)
+import pandas._testing as tm
+from pandas.tests.plotting.common import (
+ _check_ax_scales,
+ _check_axes_shape,
+ _check_colors,
+ _check_legend_labels,
+ _check_patches_all_filled,
+ _check_plot_works,
+ _check_text_labels,
+ _check_ticks_props,
+ get_x_axis,
+ get_y_axis,
+)
+
+mpl = pytest.importorskip("matplotlib")
+
+
+@pytest.fixture
+def ts():
+ return Series(
+ np.arange(30, dtype=np.float64),
+ index=date_range("2020-01-01", periods=30, freq="B"),
+ name="ts",
+ )
+
+
+class TestSeriesPlots:
+ @pytest.mark.parametrize("kwargs", [{}, {"grid": False}, {"figsize": (8, 10)}])
+ def test_hist_legacy_kwargs(self, ts, kwargs):
+ _check_plot_works(ts.hist, **kwargs)
+
+ @pytest.mark.parametrize("kwargs", [{}, {"bins": 5}])
+ def test_hist_legacy_kwargs_warning(self, ts, kwargs):
+ # _check_plot_works adds an ax so catch warning. see GH #13188
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ _check_plot_works(ts.hist, by=ts.index.month, **kwargs)
+
+ def test_hist_legacy_ax(self, ts):
+ fig, ax = mpl.pyplot.subplots(1, 1)
+ _check_plot_works(ts.hist, ax=ax, default_axes=True)
+
+ def test_hist_legacy_ax_and_fig(self, ts):
+ fig, ax = mpl.pyplot.subplots(1, 1)
+ _check_plot_works(ts.hist, ax=ax, figure=fig, default_axes=True)
+
+ def test_hist_legacy_fig(self, ts):
+ fig, _ = mpl.pyplot.subplots(1, 1)
+ _check_plot_works(ts.hist, figure=fig, default_axes=True)
+
+ def test_hist_legacy_multi_ax(self, ts):
+ fig, (ax1, ax2) = mpl.pyplot.subplots(1, 2)
+ _check_plot_works(ts.hist, figure=fig, ax=ax1, default_axes=True)
+ _check_plot_works(ts.hist, figure=fig, ax=ax2, default_axes=True)
+
+ def test_hist_legacy_by_fig_error(self, ts):
+ fig, _ = mpl.pyplot.subplots(1, 1)
+ msg = (
+ "Cannot pass 'figure' when using the 'by' argument, since a new 'Figure' "
+ "instance will be created"
+ )
+ with pytest.raises(ValueError, match=msg):
+ ts.hist(by=ts.index, figure=fig)
+
+ def test_hist_bins_legacy(self):
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)))
+ ax = df.hist(bins=2)[0][0]
+ assert len(ax.patches) == 2
+
+ def test_hist_layout(self, hist_df):
+ df = hist_df
+ msg = "The 'layout' keyword is not supported when 'by' is None"
+ with pytest.raises(ValueError, match=msg):
+ df.height.hist(layout=(1, 1))
+
+ with pytest.raises(ValueError, match=msg):
+ df.height.hist(layout=[1, 1])
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "by, layout, axes_num, res_layout",
+ [
+ ["gender", (2, 1), 2, (2, 1)],
+ ["gender", (3, -1), 2, (3, 1)],
+ ["category", (4, 1), 4, (4, 1)],
+ ["category", (2, -1), 4, (2, 2)],
+ ["category", (3, -1), 4, (3, 2)],
+ ["category", (-1, 4), 4, (1, 4)],
+ ["classroom", (2, 2), 3, (2, 2)],
+ ],
+ )
+ def test_hist_layout_with_by(self, hist_df, by, layout, axes_num, res_layout):
+ df = hist_df
+
+ # _check_plot_works adds an `ax` kwarg to the method call
+ # so we get a warning about an axis being cleared, even
+        # though we don't explicitly pass one, see GH #13188
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ axes = _check_plot_works(df.height.hist, by=getattr(df, by), layout=layout)
+ _check_axes_shape(axes, axes_num=axes_num, layout=res_layout)
+
+ def test_hist_layout_with_by_shape(self, hist_df):
+ df = hist_df
+
+ axes = df.height.hist(by=df.category, layout=(4, 2), figsize=(12, 7))
+ _check_axes_shape(axes, axes_num=4, layout=(4, 2), figsize=(12, 7))
+
+ def test_hist_no_overlap(self):
+ from matplotlib.pyplot import (
+ gcf,
+ subplot,
+ )
+
+ x = Series(np.random.default_rng(2).standard_normal(2))
+ y = Series(np.random.default_rng(2).standard_normal(2))
+ subplot(121)
+ x.hist()
+ subplot(122)
+ y.hist()
+ fig = gcf()
+ axes = fig.axes
+ assert len(axes) == 2
+
+ def test_hist_by_no_extra_plots(self, hist_df):
+ df = hist_df
+ df.height.hist(by=df.gender)
+ assert len(mpl.pyplot.get_fignums()) == 1
+
+ def test_plot_fails_when_ax_differs_from_figure(self, ts):
+ from pylab import figure
+
+ fig1 = figure()
+ fig2 = figure()
+ ax1 = fig1.add_subplot(111)
+ msg = "passed axis not bound to passed figure"
+ with pytest.raises(AssertionError, match=msg):
+ ts.hist(ax=ax1, figure=fig2)
+
+ @pytest.mark.parametrize(
+ "histtype, expected",
+ [
+ ("bar", True),
+ ("barstacked", True),
+ ("step", False),
+ ("stepfilled", True),
+ ],
+ )
+ def test_histtype_argument(self, histtype, expected):
+ # GH23992 Verify functioning of histtype argument
+ ser = Series(np.random.default_rng(2).integers(1, 10))
+ ax = ser.hist(histtype=histtype)
+ _check_patches_all_filled(ax, filled=expected)
+
+ @pytest.mark.parametrize(
+ "by, expected_axes_num, expected_layout", [(None, 1, (1, 1)), ("b", 2, (1, 2))]
+ )
+ def test_hist_with_legend(self, by, expected_axes_num, expected_layout):
+ # GH 6279 - Series histogram can have a legend
+ index = 15 * ["1"] + 15 * ["2"]
+ s = Series(np.random.default_rng(2).standard_normal(30), index=index, name="a")
+ s.index.name = "b"
+
+ # Use default_axes=True when plotting method generate subplots itself
+ axes = _check_plot_works(s.hist, default_axes=True, legend=True, by=by)
+ _check_axes_shape(axes, axes_num=expected_axes_num, layout=expected_layout)
+ _check_legend_labels(axes, "a")
+
+ @pytest.mark.parametrize("by", [None, "b"])
+ def test_hist_with_legend_raises(self, by):
+ # GH 6279 - Series histogram with legend and label raises
+ index = 15 * ["1"] + 15 * ["2"]
+ s = Series(np.random.default_rng(2).standard_normal(30), index=index, name="a")
+ s.index.name = "b"
+
+ with pytest.raises(ValueError, match="Cannot use both legend and label"):
+ s.hist(legend=True, by=by, label="c")
+
+ def test_hist_kwargs(self, ts):
+ _, ax = mpl.pyplot.subplots()
+ ax = ts.plot.hist(bins=5, ax=ax)
+ assert len(ax.patches) == 5
+ _check_text_labels(ax.yaxis.get_label(), "Frequency")
+
+ def test_hist_kwargs_horizontal(self, ts):
+ _, ax = mpl.pyplot.subplots()
+ ax = ts.plot.hist(bins=5, ax=ax)
+ ax = ts.plot.hist(orientation="horizontal", ax=ax)
+ _check_text_labels(ax.xaxis.get_label(), "Frequency")
+
+ def test_hist_kwargs_align(self, ts):
+ _, ax = mpl.pyplot.subplots()
+ ax = ts.plot.hist(bins=5, ax=ax)
+ ax = ts.plot.hist(align="left", stacked=True, ax=ax)
+
+ @pytest.mark.xfail(reason="Api changed in 3.6.0")
+ def test_hist_kde(self, ts):
+ pytest.importorskip("scipy")
+ _, ax = mpl.pyplot.subplots()
+ ax = ts.plot.hist(logy=True, ax=ax)
+ _check_ax_scales(ax, yaxis="log")
+ xlabels = ax.get_xticklabels()
+ # ticks are values, thus ticklabels are blank
+ _check_text_labels(xlabels, [""] * len(xlabels))
+ ylabels = ax.get_yticklabels()
+ _check_text_labels(ylabels, [""] * len(ylabels))
+
+ def test_hist_kde_plot_works(self, ts):
+ pytest.importorskip("scipy")
+ _check_plot_works(ts.plot.kde)
+
+ def test_hist_kde_density_works(self, ts):
+ pytest.importorskip("scipy")
+ _check_plot_works(ts.plot.density)
+
+ @pytest.mark.xfail(reason="Api changed in 3.6.0")
+ def test_hist_kde_logy(self, ts):
+ pytest.importorskip("scipy")
+ _, ax = mpl.pyplot.subplots()
+ ax = ts.plot.kde(logy=True, ax=ax)
+ _check_ax_scales(ax, yaxis="log")
+ xlabels = ax.get_xticklabels()
+ _check_text_labels(xlabels, [""] * len(xlabels))
+ ylabels = ax.get_yticklabels()
+ _check_text_labels(ylabels, [""] * len(ylabels))
+
+ def test_hist_kde_color_bins(self, ts):
+ pytest.importorskip("scipy")
+ _, ax = mpl.pyplot.subplots()
+ ax = ts.plot.hist(logy=True, bins=10, color="b", ax=ax)
+ _check_ax_scales(ax, yaxis="log")
+ assert len(ax.patches) == 10
+ _check_colors(ax.patches, facecolors=["b"] * 10)
+
+ def test_hist_kde_color(self, ts):
+ pytest.importorskip("scipy")
+ _, ax = mpl.pyplot.subplots()
+ ax = ts.plot.kde(logy=True, color="r", ax=ax)
+ _check_ax_scales(ax, yaxis="log")
+ lines = ax.get_lines()
+ assert len(lines) == 1
+ _check_colors(lines, ["r"])
+
+
+class TestDataFramePlots:
+ @pytest.mark.slow
+ def test_hist_df_legacy(self, hist_df):
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ _check_plot_works(hist_df.hist)
+
+ @pytest.mark.slow
+ def test_hist_df_legacy_layout(self):
+ # make sure layout is handled
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)))
+ df[2] = to_datetime(
+ np.random.default_rng(2).integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ axes = _check_plot_works(df.hist, grid=False)
+ _check_axes_shape(axes, axes_num=3, layout=(2, 2))
+ assert not axes[1, 1].get_visible()
+
+ _check_plot_works(df[[2]].hist)
+
+ @pytest.mark.slow
+ def test_hist_df_legacy_layout2(self):
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 1)))
+ _check_plot_works(df.hist)
+
+ @pytest.mark.slow
+ def test_hist_df_legacy_layout3(self):
+ # make sure layout is handled
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 5)))
+ df[5] = to_datetime(
+ np.random.default_rng(2).integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ axes = _check_plot_works(df.hist, layout=(4, 2))
+ _check_axes_shape(axes, axes_num=6, layout=(4, 2))
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "kwargs", [{"sharex": True, "sharey": True}, {"figsize": (8, 10)}, {"bins": 5}]
+ )
+ def test_hist_df_legacy_layout_kwargs(self, kwargs):
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 5)))
+ df[5] = to_datetime(
+ np.random.default_rng(2).integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ # make sure sharex, sharey is handled
+ # handle figsize arg
+ # check bins argument
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ _check_plot_works(df.hist, **kwargs)
+
+ @pytest.mark.slow
+ def test_hist_df_legacy_layout_labelsize_rot(self, frame_or_series):
+ # make sure xlabelsize and xrot are handled
+ obj = frame_or_series(range(10))
+ xf, yf = 20, 18
+ xrot, yrot = 30, 40
+ axes = obj.hist(xlabelsize=xf, xrot=xrot, ylabelsize=yf, yrot=yrot)
+ _check_ticks_props(axes, xlabelsize=xf, xrot=xrot, ylabelsize=yf, yrot=yrot)
+
+ @pytest.mark.slow
+ def test_hist_df_legacy_rectangles(self):
+ from matplotlib.patches import Rectangle
+
+ ser = Series(range(10))
+ ax = ser.hist(cumulative=True, bins=4, density=True)
+ # height of last bin (index 5) must be 1.0
+ rects = [x for x in ax.get_children() if isinstance(x, Rectangle)]
+ tm.assert_almost_equal(rects[-1].get_height(), 1.0)
+
+ @pytest.mark.slow
+ def test_hist_df_legacy_scale(self):
+ ser = Series(range(10))
+ ax = ser.hist(log=True)
+ # scale of y must be 'log'
+ _check_ax_scales(ax, yaxis="log")
+
+ @pytest.mark.slow
+ def test_hist_df_legacy_external_error(self):
+ ser = Series(range(10))
+ # propagate attr exception from matplotlib.Axes.hist
+ with tm.external_error_raised(AttributeError):
+ ser.hist(foo="bar")
+
+ def test_hist_non_numerical_or_datetime_raises(self):
+ # gh-10444, GH32590
+ df = DataFrame(
+ {
+ "a": np.random.default_rng(2).random(10),
+ "b": np.random.default_rng(2).integers(0, 10, 10),
+ "c": to_datetime(
+ np.random.default_rng(2).integers(
+ 1582800000000000000, 1583500000000000000, 10, dtype=np.int64
+ )
+ ),
+ "d": to_datetime(
+ np.random.default_rng(2).integers(
+ 1582800000000000000, 1583500000000000000, 10, dtype=np.int64
+ ),
+ utc=True,
+ ),
+ }
+ )
+ df_o = df.astype(object)
+
+ msg = "hist method requires numerical or datetime columns, nothing to plot."
+ with pytest.raises(ValueError, match=msg):
+ df_o.hist()
+
+ @pytest.mark.parametrize(
+ "layout_test",
+ (
+ {"layout": None, "expected_size": (2, 2)}, # default is 2x2
+ {"layout": (2, 2), "expected_size": (2, 2)},
+ {"layout": (4, 1), "expected_size": (4, 1)},
+ {"layout": (1, 4), "expected_size": (1, 4)},
+ {"layout": (3, 3), "expected_size": (3, 3)},
+ {"layout": (-1, 4), "expected_size": (1, 4)},
+ {"layout": (4, -1), "expected_size": (4, 1)},
+ {"layout": (-1, 2), "expected_size": (2, 2)},
+ {"layout": (2, -1), "expected_size": (2, 2)},
+ ),
+ )
+ def test_hist_layout(self, layout_test):
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)))
+ df[2] = to_datetime(
+ np.random.default_rng(2).integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ axes = df.hist(layout=layout_test["layout"])
+ expected = layout_test["expected_size"]
+ _check_axes_shape(axes, axes_num=3, layout=expected)
+
+ def test_hist_layout_error(self):
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)))
+ df[2] = to_datetime(
+ np.random.default_rng(2).integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ # layout too small for all 4 plots
+ msg = "Layout of 1x1 must be larger than required size 3"
+ with pytest.raises(ValueError, match=msg):
+ df.hist(layout=(1, 1))
+
+ # invalid format for layout
+ msg = re.escape("Layout must be a tuple of (rows, columns)")
+ with pytest.raises(ValueError, match=msg):
+ df.hist(layout=(1,))
+ msg = "At least one dimension of layout must be positive"
+ with pytest.raises(ValueError, match=msg):
+ df.hist(layout=(-1, -1))
+
+ # GH 9351
+ def test_tight_layout(self):
+ df = DataFrame(np.random.default_rng(2).standard_normal((100, 2)))
+ df[2] = to_datetime(
+ np.random.default_rng(2).integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=100,
+ dtype=np.int64,
+ )
+ )
+ # Use default_axes=True when plotting method generate subplots itself
+ _check_plot_works(df.hist, default_axes=True)
+ mpl.pyplot.tight_layout()
+
+ def test_hist_subplot_xrot(self):
+ # GH 30288
+ df = DataFrame(
+ {
+ "length": [1.5, 0.5, 1.2, 0.9, 3],
+ "animal": ["pig", "rabbit", "pig", "pig", "rabbit"],
+ }
+ )
+ # Use default_axes=True when plotting method generate subplots itself
+ axes = _check_plot_works(
+ df.hist,
+ default_axes=True,
+ column="length",
+ by="animal",
+ bins=5,
+ xrot=0,
+ )
+ _check_ticks_props(axes, xrot=0)
+
+ @pytest.mark.parametrize(
+ "column, expected",
+ [
+ (None, ["width", "length", "height"]),
+ (["length", "width", "height"], ["length", "width", "height"]),
+ ],
+ )
+ def test_hist_column_order_unchanged(self, column, expected):
+ # GH29235
+
+ df = DataFrame(
+ {
+ "width": [0.7, 0.2, 0.15, 0.2, 1.1],
+ "length": [1.5, 0.5, 1.2, 0.9, 3],
+ "height": [3, 0.5, 3.4, 2, 1],
+ },
+ index=["pig", "rabbit", "duck", "chicken", "horse"],
+ )
+
+ # Use default_axes=True when plotting method generate subplots itself
+ axes = _check_plot_works(
+ df.hist,
+ default_axes=True,
+ column=column,
+ layout=(1, 3),
+ )
+ result = [axes[0, i].get_title() for i in range(3)]
+ assert result == expected
+
+ @pytest.mark.parametrize(
+ "histtype, expected",
+ [
+ ("bar", True),
+ ("barstacked", True),
+ ("step", False),
+ ("stepfilled", True),
+ ],
+ )
+ def test_histtype_argument(self, histtype, expected):
+ # GH23992 Verify functioning of histtype argument
+ df = DataFrame(
+ np.random.default_rng(2).integers(1, 10, size=(100, 2)), columns=["a", "b"]
+ )
+ ax = df.hist(histtype=histtype)
+ _check_patches_all_filled(ax, filled=expected)
+
+ @pytest.mark.parametrize("by", [None, "c"])
+ @pytest.mark.parametrize("column", [None, "b"])
+ def test_hist_with_legend(self, by, column):
+ # GH 6279 - DataFrame histogram can have a legend
+ expected_axes_num = 1 if by is None and column is not None else 2
+ expected_layout = (1, expected_axes_num)
+ expected_labels = column or ["a", "b"]
+ if by is not None:
+ expected_labels = [expected_labels] * 2
+
+ index = Index(15 * ["1"] + 15 * ["2"], name="c")
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((30, 2)),
+ index=index,
+ columns=["a", "b"],
+ )
+
+ # Use default_axes=True when plotting method generate subplots itself
+ axes = _check_plot_works(
+ df.hist,
+ default_axes=True,
+ legend=True,
+ by=by,
+ column=column,
+ )
+
+ _check_axes_shape(axes, axes_num=expected_axes_num, layout=expected_layout)
+ if by is None and column is None:
+ axes = axes[0]
+ for expected_label, ax in zip(expected_labels, axes):
+ _check_legend_labels(ax, expected_label)
+
+ @pytest.mark.parametrize("by", [None, "c"])
+ @pytest.mark.parametrize("column", [None, "b"])
+ def test_hist_with_legend_raises(self, by, column):
+ # GH 6279 - DataFrame histogram with legend and label raises
+ index = Index(15 * ["1"] + 15 * ["2"], name="c")
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((30, 2)),
+ index=index,
+ columns=["a", "b"],
+ )
+
+ with pytest.raises(ValueError, match="Cannot use both legend and label"):
+ df.hist(legend=True, by=by, column=column, label="d")
+
+ def test_hist_df_kwargs(self):
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)))
+ _, ax = mpl.pyplot.subplots()
+ ax = df.plot.hist(bins=5, ax=ax)
+ assert len(ax.patches) == 10
+
+ def test_hist_df_with_nonnumerics(self):
+ # GH 9853
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=["A", "B", "C", "D"],
+ )
+ df["E"] = ["x", "y"] * 5
+ _, ax = mpl.pyplot.subplots()
+ ax = df.plot.hist(bins=5, ax=ax)
+ assert len(ax.patches) == 20
+
+ def test_hist_df_with_nonnumerics_no_bins(self):
+ # GH 9853
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((10, 4)),
+ columns=["A", "B", "C", "D"],
+ )
+ df["E"] = ["x", "y"] * 5
+ _, ax = mpl.pyplot.subplots()
+ ax = df.plot.hist(ax=ax) # bins=10
+ assert len(ax.patches) == 40
+
+ def test_hist_secondary_legend(self):
+ # GH 9610
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((30, 4)), columns=list("abcd")
+ )
+
+ # primary -> secondary
+ _, ax = mpl.pyplot.subplots()
+ ax = df["a"].plot.hist(legend=True, ax=ax)
+ df["b"].plot.hist(ax=ax, legend=True, secondary_y=True)
+ # both legends are drawn on left ax
+ # left and right axis must be visible
+ _check_legend_labels(ax, labels=["a", "b (right)"])
+ assert ax.get_yaxis().get_visible()
+ assert ax.right_ax.get_yaxis().get_visible()
+
+ def test_hist_secondary_secondary(self):
+ # GH 9610
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((30, 4)), columns=list("abcd")
+ )
+ # secondary -> secondary
+ _, ax = mpl.pyplot.subplots()
+ ax = df["a"].plot.hist(legend=True, secondary_y=True, ax=ax)
+ df["b"].plot.hist(ax=ax, legend=True, secondary_y=True)
+ # both legends are draw on left ax
+ # left axis must be invisible, right axis must be visible
+ _check_legend_labels(ax.left_ax, labels=["a (right)", "b (right)"])
+ assert not ax.left_ax.get_yaxis().get_visible()
+ assert ax.get_yaxis().get_visible()
+
+ def test_hist_secondary_primary(self):
+ # GH 9610
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((30, 4)), columns=list("abcd")
+ )
+ # secondary -> primary
+ _, ax = mpl.pyplot.subplots()
+ ax = df["a"].plot.hist(legend=True, secondary_y=True, ax=ax)
+ # right axes is returned
+ df["b"].plot.hist(ax=ax, legend=True)
+ # both legends are draw on left ax
+ # left and right axis must be visible
+ _check_legend_labels(ax.left_ax, labels=["a (right)", "b"])
+ assert ax.left_ax.get_yaxis().get_visible()
+ assert ax.get_yaxis().get_visible()
+
+ def test_hist_with_nans_and_weights(self):
+ # GH 48884
+ mpl_patches = pytest.importorskip("matplotlib.patches")
+ df = DataFrame(
+ [[np.nan, 0.2, 0.3], [0.4, np.nan, np.nan], [0.7, 0.8, 0.9]],
+ columns=list("abc"),
+ )
+ weights = np.array([0.25, 0.3, 0.45])
+ no_nan_df = DataFrame([[0.4, 0.2, 0.3], [0.7, 0.8, 0.9]], columns=list("abc"))
+ no_nan_weights = np.array([[0.3, 0.25, 0.25], [0.45, 0.45, 0.45]])
+
+ _, ax0 = mpl.pyplot.subplots()
+ df.plot.hist(ax=ax0, weights=weights)
+ rects = [x for x in ax0.get_children() if isinstance(x, mpl_patches.Rectangle)]
+ heights = [rect.get_height() for rect in rects]
+ _, ax1 = mpl.pyplot.subplots()
+ no_nan_df.plot.hist(ax=ax1, weights=no_nan_weights)
+ no_nan_rects = [
+ x for x in ax1.get_children() if isinstance(x, mpl_patches.Rectangle)
+ ]
+ no_nan_heights = [rect.get_height() for rect in no_nan_rects]
+ assert all(h0 == h1 for h0, h1 in zip(heights, no_nan_heights))
+
+ idxerror_weights = np.array([[0.3, 0.25], [0.45, 0.45]])
+
+ msg = "weights must have the same shape as data, or be a single column"
+ with pytest.raises(ValueError, match=msg):
+ _, ax2 = mpl.pyplot.subplots()
+ no_nan_df.plot.hist(ax=ax2, weights=idxerror_weights)
+
+
+class TestDataFrameGroupByPlots:
+ def test_grouped_hist_legacy(self):
+ from pandas.plotting._matplotlib.hist import _grouped_hist
+
+ rs = np.random.default_rng(10)
+ df = DataFrame(rs.standard_normal((10, 1)), columns=["A"])
+ df["B"] = to_datetime(
+ rs.integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ df["C"] = rs.integers(0, 4, 10)
+ df["D"] = ["X"] * 10
+
+ axes = _grouped_hist(df.A, by=df.C)
+ _check_axes_shape(axes, axes_num=4, layout=(2, 2))
+
+ def test_grouped_hist_legacy_axes_shape_no_col(self):
+ rs = np.random.default_rng(10)
+ df = DataFrame(rs.standard_normal((10, 1)), columns=["A"])
+ df["B"] = to_datetime(
+ rs.integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ df["C"] = rs.integers(0, 4, 10)
+ df["D"] = ["X"] * 10
+ axes = df.hist(by=df.C)
+ _check_axes_shape(axes, axes_num=4, layout=(2, 2))
+
+ def test_grouped_hist_legacy_single_key(self):
+ rs = np.random.default_rng(2)
+ df = DataFrame(rs.standard_normal((10, 1)), columns=["A"])
+ df["B"] = to_datetime(
+ rs.integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ df["C"] = rs.integers(0, 4, 10)
+ df["D"] = ["X"] * 10
+ # group by a key with single value
+ axes = df.hist(by="D", rot=30)
+ _check_axes_shape(axes, axes_num=1, layout=(1, 1))
+ _check_ticks_props(axes, xrot=30)
+
+ def test_grouped_hist_legacy_grouped_hist_kwargs(self):
+ from matplotlib.patches import Rectangle
+
+ from pandas.plotting._matplotlib.hist import _grouped_hist
+
+ rs = np.random.default_rng(2)
+ df = DataFrame(rs.standard_normal((10, 1)), columns=["A"])
+ df["B"] = to_datetime(
+ rs.integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ df["C"] = rs.integers(0, 4, 10)
+ # make sure kwargs to hist are handled
+ xf, yf = 20, 18
+ xrot, yrot = 30, 40
+
+ axes = _grouped_hist(
+ df.A,
+ by=df.C,
+ cumulative=True,
+ bins=4,
+ xlabelsize=xf,
+ xrot=xrot,
+ ylabelsize=yf,
+ yrot=yrot,
+ density=True,
+ )
+ # height of last bin (index 5) must be 1.0
+ for ax in axes.ravel():
+ rects = [x for x in ax.get_children() if isinstance(x, Rectangle)]
+ height = rects[-1].get_height()
+ tm.assert_almost_equal(height, 1.0)
+ _check_ticks_props(axes, xlabelsize=xf, xrot=xrot, ylabelsize=yf, yrot=yrot)
+
+ def test_grouped_hist_legacy_grouped_hist(self):
+ from pandas.plotting._matplotlib.hist import _grouped_hist
+
+ rs = np.random.default_rng(2)
+ df = DataFrame(rs.standard_normal((10, 1)), columns=["A"])
+ df["B"] = to_datetime(
+ rs.integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ df["C"] = rs.integers(0, 4, 10)
+ df["D"] = ["X"] * 10
+ axes = _grouped_hist(df.A, by=df.C, log=True)
+ # scale of y must be 'log'
+ _check_ax_scales(axes, yaxis="log")
+
+ def test_grouped_hist_legacy_external_err(self):
+ from pandas.plotting._matplotlib.hist import _grouped_hist
+
+ rs = np.random.default_rng(2)
+ df = DataFrame(rs.standard_normal((10, 1)), columns=["A"])
+ df["B"] = to_datetime(
+ rs.integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ df["C"] = rs.integers(0, 4, 10)
+ df["D"] = ["X"] * 10
+ # propagate attr exception from matplotlib.Axes.hist
+ with tm.external_error_raised(AttributeError):
+ _grouped_hist(df.A, by=df.C, foo="bar")
+
+ def test_grouped_hist_legacy_figsize_err(self):
+ rs = np.random.default_rng(2)
+ df = DataFrame(rs.standard_normal((10, 1)), columns=["A"])
+ df["B"] = to_datetime(
+ rs.integers(
+ 812419200000000000,
+ 819331200000000000,
+ size=10,
+ dtype=np.int64,
+ )
+ )
+ df["C"] = rs.integers(0, 4, 10)
+ df["D"] = ["X"] * 10
+ msg = "Specify figure size by tuple instead"
+ with pytest.raises(ValueError, match=msg):
+ df.hist(by="C", figsize="default")
+
+ def test_grouped_hist_legacy2(self):
+ n = 10
+ weight = Series(np.random.default_rng(2).normal(166, 20, size=n))
+ height = Series(np.random.default_rng(2).normal(60, 10, size=n))
+ gender_int = np.random.default_rng(2).choice([0, 1], size=n)
+ df_int = DataFrame({"height": height, "weight": weight, "gender": gender_int})
+ gb = df_int.groupby("gender")
+ axes = gb.hist()
+ assert len(axes) == 2
+ assert len(mpl.pyplot.get_fignums()) == 2
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "msg, plot_col, by_col, layout",
+ [
+ [
+ "Layout of 1x1 must be larger than required size 2",
+ "weight",
+ "gender",
+ (1, 1),
+ ],
+ [
+ "Layout of 1x3 must be larger than required size 4",
+ "height",
+ "category",
+ (1, 3),
+ ],
+ [
+ "At least one dimension of layout must be positive",
+ "height",
+ "category",
+ (-1, -1),
+ ],
+ ],
+ )
+ def test_grouped_hist_layout_error(self, hist_df, msg, plot_col, by_col, layout):
+ df = hist_df
+ with pytest.raises(ValueError, match=msg):
+ df.hist(column=plot_col, by=getattr(df, by_col), layout=layout)
+
+ @pytest.mark.slow
+ def test_grouped_hist_layout_warning(self, hist_df):
+ df = hist_df
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ axes = _check_plot_works(
+ df.hist, column="height", by=df.gender, layout=(2, 1)
+ )
+ _check_axes_shape(axes, axes_num=2, layout=(2, 1))
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "layout, check_layout, figsize",
+ [[(4, 1), (4, 1), None], [(-1, 1), (4, 1), None], [(4, 2), (4, 2), (12, 8)]],
+ )
+ def test_grouped_hist_layout_figsize(self, hist_df, layout, check_layout, figsize):
+ df = hist_df
+ axes = df.hist(column="height", by=df.category, layout=layout, figsize=figsize)
+ _check_axes_shape(axes, axes_num=4, layout=check_layout, figsize=figsize)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize("kwargs", [{}, {"column": "height", "layout": (2, 2)}])
+ def test_grouped_hist_layout_by_warning(self, hist_df, kwargs):
+ df = hist_df
+ # GH 6769
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ axes = _check_plot_works(df.hist, by="classroom", **kwargs)
+ _check_axes_shape(axes, axes_num=3, layout=(2, 2))
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "kwargs, axes_num, layout",
+ [
+ [{"by": "gender", "layout": (3, 5)}, 2, (3, 5)],
+ [{"column": ["height", "weight", "category"]}, 3, (2, 2)],
+ ],
+ )
+ def test_grouped_hist_layout_axes(self, hist_df, kwargs, axes_num, layout):
+ df = hist_df
+ axes = df.hist(**kwargs)
+ _check_axes_shape(axes, axes_num=axes_num, layout=layout)
+
+ def test_grouped_hist_multiple_axes(self, hist_df):
+ # GH 6970, GH 7069
+ df = hist_df
+
+ fig, axes = mpl.pyplot.subplots(2, 3)
+ returned = df.hist(column=["height", "weight", "category"], ax=axes[0])
+ _check_axes_shape(returned, axes_num=3, layout=(1, 3))
+ tm.assert_numpy_array_equal(returned, axes[0])
+ assert returned[0].figure is fig
+
+ def test_grouped_hist_multiple_axes_no_cols(self, hist_df):
+ # GH 6970, GH 7069
+ df = hist_df
+
+ fig, axes = mpl.pyplot.subplots(2, 3)
+ returned = df.hist(by="classroom", ax=axes[1])
+ _check_axes_shape(returned, axes_num=3, layout=(1, 3))
+ tm.assert_numpy_array_equal(returned, axes[1])
+ assert returned[0].figure is fig
+
+ def test_grouped_hist_multiple_axes_error(self, hist_df):
+ # GH 6970, GH 7069
+ df = hist_df
+ fig, axes = mpl.pyplot.subplots(2, 3)
+ # pass different number of axes from required
+ msg = "The number of passed axes must be 1, the same as the output plot"
+ with pytest.raises(ValueError, match=msg):
+ axes = df.hist(column="height", ax=axes)
+
+ def test_axis_share_x(self, hist_df):
+ df = hist_df
+ # GH4089
+ ax1, ax2 = df.hist(column="height", by=df.gender, sharex=True)
+
+ # share x
+ assert get_x_axis(ax1).joined(ax1, ax2)
+ assert get_x_axis(ax2).joined(ax1, ax2)
+
+ # don't share y
+ assert not get_y_axis(ax1).joined(ax1, ax2)
+ assert not get_y_axis(ax2).joined(ax1, ax2)
+
+ def test_axis_share_y(self, hist_df):
+ df = hist_df
+ ax1, ax2 = df.hist(column="height", by=df.gender, sharey=True)
+
+ # share y
+ assert get_y_axis(ax1).joined(ax1, ax2)
+ assert get_y_axis(ax2).joined(ax1, ax2)
+
+ # don't share x
+ assert not get_x_axis(ax1).joined(ax1, ax2)
+ assert not get_x_axis(ax2).joined(ax1, ax2)
+
+ def test_axis_share_xy(self, hist_df):
+ df = hist_df
+ ax1, ax2 = df.hist(column="height", by=df.gender, sharex=True, sharey=True)
+
+ # share both x and y
+ assert get_x_axis(ax1).joined(ax1, ax2)
+ assert get_x_axis(ax2).joined(ax1, ax2)
+
+ assert get_y_axis(ax1).joined(ax1, ax2)
+ assert get_y_axis(ax2).joined(ax1, ax2)
+
+ @pytest.mark.parametrize(
+ "histtype, expected",
+ [
+ ("bar", True),
+ ("barstacked", True),
+ ("step", False),
+ ("stepfilled", True),
+ ],
+ )
+ def test_histtype_argument(self, histtype, expected):
+ # GH23992 Verify functioning of histtype argument
+ df = DataFrame(
+ np.random.default_rng(2).integers(1, 10, size=(10, 2)), columns=["a", "b"]
+ )
+ ax = df.hist(by="a", histtype=histtype)
+ _check_patches_all_filled(ax, filled=expected)
diff --git a/vllm/lib/python3.10/site-packages/pandas/tests/plotting/test_misc.py b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/test_misc.py
new file mode 100644
index 0000000000000000000000000000000000000000..cfb657c2a800fefe2d509ddfb398399af4ce8649
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pandas/tests/plotting/test_misc.py
@@ -0,0 +1,720 @@
+""" Test cases for misc plot functions """
+import os
+
+import numpy as np
+import pytest
+
+import pandas.util._test_decorators as td
+
+from pandas import (
+ DataFrame,
+ Index,
+ Series,
+ Timestamp,
+ date_range,
+ interval_range,
+ period_range,
+ plotting,
+ read_csv,
+)
+import pandas._testing as tm
+from pandas.tests.plotting.common import (
+ _check_colors,
+ _check_legend_labels,
+ _check_plot_works,
+ _check_text_labels,
+ _check_ticks_props,
+)
+
+mpl = pytest.importorskip("matplotlib")
+plt = pytest.importorskip("matplotlib.pyplot")
+cm = pytest.importorskip("matplotlib.cm")
+
+
+@pytest.fixture
+def iris(datapath) -> DataFrame:
+ """
+ The iris dataset as a DataFrame.
+ """
+ return read_csv(datapath("io", "data", "csv", "iris.csv"))
+
+
+@td.skip_if_installed("matplotlib")
+def test_import_error_message():
+ # GH-19810
+ df = DataFrame({"A": [1, 2]})
+
+ with pytest.raises(ImportError, match="matplotlib is required for plotting"):
+ df.plot()
+
+
+def test_get_accessor_args():
+ func = plotting._core.PlotAccessor._get_call_args
+
+ msg = "Called plot accessor for type list, expected Series or DataFrame"
+ with pytest.raises(TypeError, match=msg):
+ func(backend_name="", data=[], args=[], kwargs={})
+
+ msg = "should not be called with positional arguments"
+ with pytest.raises(TypeError, match=msg):
+ func(backend_name="", data=Series(dtype=object), args=["line", None], kwargs={})
+
+ x, y, kind, kwargs = func(
+ backend_name="",
+ data=DataFrame(),
+ args=["x"],
+ kwargs={"y": "y", "kind": "bar", "grid": False},
+ )
+ assert x == "x"
+ assert y == "y"
+ assert kind == "bar"
+ assert kwargs == {"grid": False}
+
+ x, y, kind, kwargs = func(
+ backend_name="pandas.plotting._matplotlib",
+ data=Series(dtype=object),
+ args=[],
+ kwargs={},
+ )
+ assert x is None
+ assert y is None
+ assert kind == "line"
+ assert len(kwargs) == 24
+
+
+@pytest.mark.parametrize("kind", plotting.PlotAccessor._all_kinds)
+@pytest.mark.parametrize(
+ "data", [DataFrame(np.arange(15).reshape(5, 3)), Series(range(5))]
+)
+@pytest.mark.parametrize(
+ "index",
+ [
+ Index(range(5)),
+ date_range("2020-01-01", periods=5),
+ period_range("2020-01-01", periods=5),
+ ],
+)
+def test_savefig(kind, data, index):
+ fig, ax = plt.subplots()
+ data.index = index
+ kwargs = {}
+ if kind in ["hexbin", "scatter", "pie"]:
+ if isinstance(data, Series):
+ pytest.skip(f"{kind} not supported with Series")
+ kwargs = {"x": 0, "y": 1}
+ data.plot(kind=kind, ax=ax, **kwargs)
+ fig.savefig(os.devnull)
+
+
+class TestSeriesPlots:
+ def test_autocorrelation_plot(self):
+ from pandas.plotting import autocorrelation_plot
+
+ ser = Series(
+ np.arange(10, dtype=np.float64),
+ index=date_range("2020-01-01", periods=10),
+ name="ts",
+ )
+ # Ensure no UserWarning when making plot
+ with tm.assert_produces_warning(None):
+ _check_plot_works(autocorrelation_plot, series=ser)
+ _check_plot_works(autocorrelation_plot, series=ser.values)
+
+ ax = autocorrelation_plot(ser, label="Test")
+ _check_legend_labels(ax, labels=["Test"])
+
+ @pytest.mark.parametrize("kwargs", [{}, {"lag": 5}])
+ def test_lag_plot(self, kwargs):
+ from pandas.plotting import lag_plot
+
+ ser = Series(
+ np.arange(10, dtype=np.float64),
+ index=date_range("2020-01-01", periods=10),
+ name="ts",
+ )
+ _check_plot_works(lag_plot, series=ser, **kwargs)
+
+ def test_bootstrap_plot(self):
+ from pandas.plotting import bootstrap_plot
+
+ ser = Series(
+ np.arange(10, dtype=np.float64),
+ index=date_range("2020-01-01", periods=10),
+ name="ts",
+ )
+ _check_plot_works(bootstrap_plot, series=ser, size=10)
+
+
+class TestDataFramePlots:
+ @pytest.mark.parametrize("pass_axis", [False, True])
+ def test_scatter_matrix_axis(self, pass_axis):
+ pytest.importorskip("scipy")
+ scatter_matrix = plotting.scatter_matrix
+
+ ax = None
+ if pass_axis:
+ _, ax = mpl.pyplot.subplots(3, 3)
+
+ df = DataFrame(np.random.default_rng(2).standard_normal((100, 3)))
+
+ # we are plotting multiples on a sub-plot
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ axes = _check_plot_works(
+ scatter_matrix,
+ frame=df,
+ range_padding=0.1,
+ ax=ax,
+ )
+ axes0_labels = axes[0][0].yaxis.get_majorticklabels()
+ # GH 5662
+ expected = ["-2", "0", "2"]
+ _check_text_labels(axes0_labels, expected)
+ _check_ticks_props(axes, xlabelsize=8, xrot=90, ylabelsize=8, yrot=0)
+
+ @pytest.mark.parametrize("pass_axis", [False, True])
+ def test_scatter_matrix_axis_smaller(self, pass_axis):
+ pytest.importorskip("scipy")
+ scatter_matrix = plotting.scatter_matrix
+
+ ax = None
+ if pass_axis:
+ _, ax = mpl.pyplot.subplots(3, 3)
+
+ df = DataFrame(np.random.default_rng(11).standard_normal((100, 3)))
+ df[0] = (df[0] - 2) / 3
+
+ # we are plotting multiples on a sub-plot
+ with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
+ axes = _check_plot_works(
+ scatter_matrix,
+ frame=df,
+ range_padding=0.1,
+ ax=ax,
+ )
+ axes0_labels = axes[0][0].yaxis.get_majorticklabels()
+ expected = ["-1.0", "-0.5", "0.0"]
+ _check_text_labels(axes0_labels, expected)
+ _check_ticks_props(axes, xlabelsize=8, xrot=90, ylabelsize=8, yrot=0)
+
+ @pytest.mark.slow
+ def test_andrews_curves_no_warning(self, iris):
+ from pandas.plotting import andrews_curves
+
+ df = iris
+ # Ensure no UserWarning when making plot
+ with tm.assert_produces_warning(None):
+ _check_plot_works(andrews_curves, frame=df, class_column="Name")
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "linecolors",
+ [
+ ("#556270", "#4ECDC4", "#C7F464"),
+ ["dodgerblue", "aquamarine", "seagreen"],
+ ],
+ )
+ @pytest.mark.parametrize(
+ "df",
+ [
+ "iris",
+ DataFrame(
+ {
+ "A": np.random.default_rng(2).standard_normal(10),
+ "B": np.random.default_rng(2).standard_normal(10),
+ "C": np.random.default_rng(2).standard_normal(10),
+ "Name": ["A"] * 10,
+ }
+ ),
+ ],
+ )
+ def test_andrews_curves_linecolors(self, request, df, linecolors):
+ from pandas.plotting import andrews_curves
+
+ if isinstance(df, str):
+ df = request.getfixturevalue(df)
+ ax = _check_plot_works(
+ andrews_curves, frame=df, class_column="Name", color=linecolors
+ )
+ _check_colors(
+ ax.get_lines()[:10], linecolors=linecolors, mapping=df["Name"][:10]
+ )
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "df",
+ [
+ "iris",
+ DataFrame(
+ {
+ "A": np.random.default_rng(2).standard_normal(10),
+ "B": np.random.default_rng(2).standard_normal(10),
+ "C": np.random.default_rng(2).standard_normal(10),
+ "Name": ["A"] * 10,
+ }
+ ),
+ ],
+ )
+ def test_andrews_curves_cmap(self, request, df):
+ from pandas.plotting import andrews_curves
+
+ if isinstance(df, str):
+ df = request.getfixturevalue(df)
+ cmaps = [cm.jet(n) for n in np.linspace(0, 1, df["Name"].nunique())]
+ ax = _check_plot_works(
+ andrews_curves, frame=df, class_column="Name", color=cmaps
+ )
+ _check_colors(ax.get_lines()[:10], linecolors=cmaps, mapping=df["Name"][:10])
+
+ @pytest.mark.slow
+ def test_andrews_curves_handle(self):
+ from pandas.plotting import andrews_curves
+
+ colors = ["b", "g", "r"]
+ df = DataFrame({"A": [1, 2, 3], "B": [1, 2, 3], "C": [1, 2, 3], "Name": colors})
+ ax = andrews_curves(df, "Name", color=colors)
+ handles, _ = ax.get_legend_handles_labels()
+ _check_colors(handles, linecolors=colors)
+
+ @pytest.mark.slow
+ @pytest.mark.parametrize(
+ "color",
+ [("#556270", "#4ECDC4", "#C7F464"), ["dodgerblue", "aquamarine", "seagreen"]],
+ )
+ def test_parallel_coordinates_colors(self, iris, color):
+ from pandas.plotting import parallel_coordinates
+
+ df = iris
+
+ ax = _check_plot_works(
+ parallel_coordinates, frame=df, class_column="Name", color=color
+ )
+ _check_colors(ax.get_lines()[:10], linecolors=color, mapping=df["Name"][:10])
+
+ @pytest.mark.slow
+ def test_parallel_coordinates_cmap(self, iris):
+ from matplotlib import cm
+
+ from pandas.plotting import parallel_coordinates
+
+ df = iris
+
+ ax = _check_plot_works(
+ parallel_coordinates, frame=df, class_column="Name", colormap=cm.jet
+ )
+ cmaps = [cm.jet(n) for n in np.linspace(0, 1, df["Name"].nunique())]
+ _check_colors(ax.get_lines()[:10], linecolors=cmaps, mapping=df["Name"][:10])
+
+ @pytest.mark.slow
+ def test_parallel_coordinates_line_diff(self, iris):
+ from pandas.plotting import parallel_coordinates
+
+ df = iris
+
+ ax = _check_plot_works(parallel_coordinates, frame=df, class_column="Name")
+ nlines = len(ax.get_lines())
+ nxticks = len(ax.xaxis.get_ticklabels())
+
+ ax = _check_plot_works(
+ parallel_coordinates, frame=df, class_column="Name", axvlines=False
+ )
+ assert len(ax.get_lines()) == (nlines - nxticks)
+
+ @pytest.mark.slow
+ def test_parallel_coordinates_handles(self, iris):
+ from pandas.plotting import parallel_coordinates
+
+ df = iris
+ colors = ["b", "g", "r"]
+ df = DataFrame({"A": [1, 2, 3], "B": [1, 2, 3], "C": [1, 2, 3], "Name": colors})
+ ax = parallel_coordinates(df, "Name", color=colors)
+ handles, _ = ax.get_legend_handles_labels()
+ _check_colors(handles, linecolors=colors)
+
+ # not sure if this is indicative of a problem
+ @pytest.mark.filterwarnings("ignore:Attempting to set:UserWarning")
+ def test_parallel_coordinates_with_sorted_labels(self):
+ """For #15908"""
+ from pandas.plotting import parallel_coordinates
+
+ df = DataFrame(
+ {
+ "feat": list(range(30)),
+ "class": [2 for _ in range(10)]
+ + [3 for _ in range(10)]
+ + [1 for _ in range(10)],
+ }
+ )
+ ax = parallel_coordinates(df, "class", sort_labels=True)
+ polylines, labels = ax.get_legend_handles_labels()
+ color_label_tuples = zip(
+ [polyline.get_color() for polyline in polylines], labels
+ )
+ ordered_color_label_tuples = sorted(color_label_tuples, key=lambda x: x[1])
+ prev_next_tupels = zip(
+ list(ordered_color_label_tuples[0:-1]), list(ordered_color_label_tuples[1:])
+ )
+ for prev, nxt in prev_next_tupels:
+ # labels and colors are ordered strictly increasing
+ assert prev[1] < nxt[1] and prev[0] < nxt[0]
+
+ def test_radviz_no_warning(self, iris):
+ from pandas.plotting import radviz
+
+ df = iris
+ # Ensure no UserWarning when making plot
+ with tm.assert_produces_warning(None):
+ _check_plot_works(radviz, frame=df, class_column="Name")
+
+ @pytest.mark.parametrize(
+ "color",
+ [("#556270", "#4ECDC4", "#C7F464"), ["dodgerblue", "aquamarine", "seagreen"]],
+ )
+ def test_radviz_color(self, iris, color):
+ from pandas.plotting import radviz
+
+ df = iris
+ ax = _check_plot_works(radviz, frame=df, class_column="Name", color=color)
+ # skip Circle drawn as ticks
+ patches = [p for p in ax.patches[:20] if p.get_label() != ""]
+ _check_colors(patches[:10], facecolors=color, mapping=df["Name"][:10])
+
+ def test_radviz_color_cmap(self, iris):
+ from matplotlib import cm
+
+ from pandas.plotting import radviz
+
+ df = iris
+ ax = _check_plot_works(radviz, frame=df, class_column="Name", colormap=cm.jet)
+ cmaps = [cm.jet(n) for n in np.linspace(0, 1, df["Name"].nunique())]
+ patches = [p for p in ax.patches[:20] if p.get_label() != ""]
+ _check_colors(patches, facecolors=cmaps, mapping=df["Name"][:10])
+
+ def test_radviz_colors_handles(self):
+ from pandas.plotting import radviz
+
+ colors = [[0.0, 0.0, 1.0, 1.0], [0.0, 0.5, 1.0, 1.0], [1.0, 0.0, 0.0, 1.0]]
+ df = DataFrame(
+ {"A": [1, 2, 3], "B": [2, 1, 3], "C": [3, 2, 1], "Name": ["b", "g", "r"]}
+ )
+ ax = radviz(df, "Name", color=colors)
+ handles, _ = ax.get_legend_handles_labels()
+ _check_colors(handles, facecolors=colors)
+
+ def test_subplot_titles(self, iris):
+ df = iris.drop("Name", axis=1).head()
+ # Use the column names as the subplot titles
+ title = list(df.columns)
+
+ # Case len(title) == len(df)
+ plot = df.plot(subplots=True, title=title)
+ assert [p.get_title() for p in plot] == title
+
+ def test_subplot_titles_too_much(self, iris):
+ df = iris.drop("Name", axis=1).head()
+ # Use the column names as the subplot titles
+ title = list(df.columns)
+ # Case len(title) > len(df)
+ msg = (
+ "The length of `title` must equal the number of columns if "
+ "using `title` of type `list` and `subplots=True`"
+ )
+ with pytest.raises(ValueError, match=msg):
+ df.plot(subplots=True, title=title + ["kittens > puppies"])
+
+ def test_subplot_titles_too_little(self, iris):
+ df = iris.drop("Name", axis=1).head()
+ # Use the column names as the subplot titles
+ title = list(df.columns)
+ msg = (
+ "The length of `title` must equal the number of columns if "
+ "using `title` of type `list` and `subplots=True`"
+ )
+ # Case len(title) < len(df)
+ with pytest.raises(ValueError, match=msg):
+ df.plot(subplots=True, title=title[:2])
+
+ def test_subplot_titles_subplots_false(self, iris):
+ df = iris.drop("Name", axis=1).head()
+ # Use the column names as the subplot titles
+ title = list(df.columns)
+ # Case subplots=False and title is of type list
+ msg = (
+ "Using `title` of type `list` is not supported unless "
+ "`subplots=True` is passed"
+ )
+ with pytest.raises(ValueError, match=msg):
+ df.plot(subplots=False, title=title)
+
+ def test_subplot_titles_numeric_square_layout(self, iris):
+ df = iris.drop("Name", axis=1).head()
+ # Use the column names as the subplot titles
+ title = list(df.columns)
+ # Case df with 3 numeric columns but layout of (2,2)
+ plot = df.drop("SepalWidth", axis=1).plot(
+ subplots=True, layout=(2, 2), title=title[:-1]
+ )
+ title_list = [ax.get_title() for sublist in plot for ax in sublist]
+ assert title_list == title[:3] + [""]
+
+ def test_get_standard_colors_random_seed(self):
+ # GH17525
+ df = DataFrame(np.zeros((10, 10)))
+
+ # Make sure that the random seed isn't reset by get_standard_colors
+ plotting.parallel_coordinates(df, 0)
+ rand1 = np.random.default_rng(None).random()
+ plotting.parallel_coordinates(df, 0)
+ rand2 = np.random.default_rng(None).random()
+ assert rand1 != rand2
+
+ def test_get_standard_colors_consistency(self):
+ # GH17525
+ # Make sure it produces the same colors every time it's called
+ from pandas.plotting._matplotlib.style import get_standard_colors
+
+ color1 = get_standard_colors(1, color_type="random")
+ color2 = get_standard_colors(1, color_type="random")
+ assert color1 == color2
+
+ def test_get_standard_colors_default_num_colors(self):
+ from pandas.plotting._matplotlib.style import get_standard_colors
+
+ # Make sure the default color_types returns the specified amount
+ color1 = get_standard_colors(1, color_type="default")
+ color2 = get_standard_colors(9, color_type="default")
+ color3 = get_standard_colors(20, color_type="default")
+ assert len(color1) == 1
+ assert len(color2) == 9
+ assert len(color3) == 20
+
+ def test_plot_single_color(self):
+ # Example from #20585. All 3 bars should have the same color
+ df = DataFrame(
+ {
+ "account-start": ["2017-02-03", "2017-03-03", "2017-01-01"],
+ "client": ["Alice Anders", "Bob Baker", "Charlie Chaplin"],
+ "balance": [-1432.32, 10.43, 30000.00],
+ "db-id": [1234, 2424, 251],
+ "proxy-id": [525, 1525, 2542],
+ "rank": [52, 525, 32],
+ }
+ )
+ ax = df.client.value_counts().plot.bar()
+ colors = [rect.get_facecolor() for rect in ax.get_children()[0:3]]
+ assert all(color == colors[0] for color in colors)
+
+ def test_get_standard_colors_no_appending(self):
+ # GH20726
+
+ # Make sure not to add more colors so that matplotlib can cycle
+ # correctly.
+ from matplotlib import cm
+
+ from pandas.plotting._matplotlib.style import get_standard_colors
+
+ color_before = cm.gnuplot(range(5))
+ color_after = get_standard_colors(1, color=color_before)
+ assert len(color_after) == len(color_before)
+
+ df = DataFrame(
+ np.random.default_rng(2).standard_normal((48, 4)), columns=list("ABCD")
+ )
+
+ color_list = cm.gnuplot(np.linspace(0, 1, 16))
+ p = df.A.plot.bar(figsize=(16, 7), color=color_list)
+ assert p.patches[1].get_facecolor() == p.patches[17].get_facecolor()
+
+ @pytest.mark.parametrize("kind", ["bar", "line"])
+ def test_dictionary_color(self, kind):
+ # issue-8193
+ # Test plot color dictionary format
+ data_files = ["a", "b"]
+
+ expected = [(0.5, 0.24, 0.6), (0.3, 0.7, 0.7)]
+
+ df1 = DataFrame(np.random.default_rng(2).random((2, 2)), columns=data_files)
+ dic_color = {"b": (0.3, 0.7, 0.7), "a": (0.5, 0.24, 0.6)}
+
+ ax = df1.plot(kind=kind, color=dic_color)
+ if kind == "bar":
+ colors = [rect.get_facecolor()[0:-1] for rect in ax.get_children()[0:3:2]]
+ else:
+ colors = [rect.get_color() for rect in ax.get_lines()[0:2]]
+ assert all(color == expected[index] for index, color in enumerate(colors))
+
+ def test_bar_plot(self):
+ # GH38947
+ # Test bar plot with string and int index
+ from matplotlib.text import Text
+
+ expected = [Text(0, 0, "0"), Text(1, 0, "Total")]
+
+ df = DataFrame(
+ {
+ "a": [1, 2],
+ },
+ index=Index([0, "Total"]),
+ )
+ plot_bar = df.plot.bar()
+ assert all(
+ (a.get_text() == b.get_text())
+ for a, b in zip(plot_bar.get_xticklabels(), expected)
+ )
+
+ def test_barh_plot_labels_mixed_integer_string(self):
+ # GH39126
+ # Test barh plot with string and integer at the same column
+ from matplotlib.text import Text
+
+ df = DataFrame([{"word": 1, "value": 0}, {"word": "knowledge", "value": 2}])
+ plot_barh = df.plot.barh(x="word", legend=None)
+ expected_yticklabels = [Text(0, 0, "1"), Text(0, 1, "knowledge")]
+ assert all(
+ actual.get_text() == expected.get_text()
+ for actual, expected in zip(
+ plot_barh.get_yticklabels(), expected_yticklabels
+ )
+ )
+
+ def test_has_externally_shared_axis_x_axis(self):
+ # GH33819
+ # Test _has_externally_shared_axis() works for x-axis
+ func = plotting._matplotlib.tools._has_externally_shared_axis
+
+ fig = mpl.pyplot.figure()
+ plots = fig.subplots(2, 4)
+
+ # Create *externally* shared axes for first and third columns
+ plots[0][0] = fig.add_subplot(231, sharex=plots[1][0])
+ plots[0][2] = fig.add_subplot(233, sharex=plots[1][2])
+
+ # Create *internally* shared axes for second and third columns
+ plots[0][1].twinx()
+ plots[0][2].twinx()
+
+ # First column is only externally shared
+ # Second column is only internally shared
+ # Third column is both
+ # Fourth column is neither
+ assert func(plots[0][0], "x")
+ assert not func(plots[0][1], "x")
+ assert func(plots[0][2], "x")
+ assert not func(plots[0][3], "x")
+
+ def test_has_externally_shared_axis_y_axis(self):
+ # GH33819
+ # Test _has_externally_shared_axis() works for y-axis
+ func = plotting._matplotlib.tools._has_externally_shared_axis
+
+ fig = mpl.pyplot.figure()
+ plots = fig.subplots(4, 2)
+
+ # Create *externally* shared axes for first and third rows
+ plots[0][0] = fig.add_subplot(321, sharey=plots[0][1])
+ plots[2][0] = fig.add_subplot(325, sharey=plots[2][1])
+
+ # Create *internally* shared axes for second and third rows
+ plots[1][0].twiny()
+ plots[2][0].twiny()
+
+ # First row is only externally shared
+ # Second row is only internally shared
+ # Third row is both
+ # Fourth row is neither
+ assert func(plots[0][0], "y")
+ assert not func(plots[1][0], "y")
+ assert func(plots[2][0], "y")
+ assert not func(plots[3][0], "y")
+
+ def test_has_externally_shared_axis_invalid_compare_axis(self):
+ # GH33819
+ # Test _has_externally_shared_axis() raises an exception when
+ # passed an invalid value as compare_axis parameter
+ func = plotting._matplotlib.tools._has_externally_shared_axis
+
+ fig = mpl.pyplot.figure()
+ plots = fig.subplots(4, 2)
+
+ # Create arbitrary axes
+ plots[0][0] = fig.add_subplot(321, sharey=plots[0][1])
+
+ # Check that an invalid compare_axis value triggers the expected exception
+ msg = "needs 'x' or 'y' as a second parameter"
+ with pytest.raises(ValueError, match=msg):
+ func(plots[0][0], "z")
+
    def test_externally_shared_axes(self):
        """End-to-end check of x-label visibility with shared/twinned axes.

        Example from GH33819: an x-axis label should be hidden exactly when
        the axis is *externally* shared with the subplot below it; a twin
        (internally shared) axis alone must not hide the label.
        """
        # Create data
        df = DataFrame(
            {
                "a": np.random.default_rng(2).standard_normal(1000),
                "b": np.random.default_rng(2).standard_normal(1000),
            }
        )

        # Create figure
        fig = mpl.pyplot.figure()
        plots = fig.subplots(2, 3)

        # Create *externally* shared axes
        plots[0][0] = fig.add_subplot(231, sharex=plots[1][0])
        # note: no plots[0][1] that's the twin only case
        plots[0][2] = fig.add_subplot(233, sharex=plots[1][2])

        # Create *internally* shared axes
        # note: no plots[0][0] that's the external only case
        twin_ax1 = plots[0][1].twinx()
        twin_ax2 = plots[0][2].twinx()

        # Plot data to primary axes
        df["a"].plot(ax=plots[0][0], title="External share only").set_xlabel(
            "this label should never be visible"
        )
        df["a"].plot(ax=plots[1][0])

        df["a"].plot(ax=plots[0][1], title="Internal share (twin) only").set_xlabel(
            "this label should always be visible"
        )
        df["a"].plot(ax=plots[1][1])

        df["a"].plot(ax=plots[0][2], title="Both").set_xlabel(
            "this label should never be visible"
        )
        df["a"].plot(ax=plots[1][2])

        # Plot data to twinned axes
        df["b"].plot(ax=twin_ax1, color="green")
        df["b"].plot(ax=twin_ax2, color="yellow")

        # external share hides the label; twin-only keeps it visible
        assert not plots[0][0].xaxis.get_label().get_visible()
        assert plots[0][1].xaxis.get_label().get_visible()
        assert not plots[0][2].xaxis.get_label().get_visible()
+
+ def test_plot_bar_axis_units_timestamp_conversion(self):
+ # GH 38736
+ # Ensure string x-axis from the second plot will not be converted to datetime
+ # due to axis data from first plot
+ df = DataFrame(
+ [1.0],
+ index=[Timestamp("2022-02-22 22:22:22")],
+ )
+ _check_plot_works(df.plot)
+ s = Series({"A": 1.0})
+ _check_plot_works(s.plot.bar)
+
+ def test_bar_plt_xaxis_intervalrange(self):
+ # GH 38969
+ # Ensure IntervalIndex x-axis produces a bar plot as expected
+ from matplotlib.text import Text
+
+ expected = [Text(0, 0, "([0, 1],)"), Text(1, 0, "([1, 2],)")]
+ s = Series(
+ [1, 2],
+ index=[interval_range(0, 2, closed="both")],
+ )
+ _check_plot_works(s.plot.bar)
+ assert all(
+ (a.get_text() == b.get_text())
+ for a, b in zip(s.plot.bar().get_xticklabels(), expected)
+ )