ghh1125 committed on
Commit
db62a8c
·
verified ·
1 Parent(s): 5d6ffb6

Upload 765 files

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. .gitattributes +115 -0
  2. Dockerfile +18 -0
  3. MetPy/mcp_output/README_MCP.md +151 -0
  4. MetPy/mcp_output/analysis.json +996 -0
  5. MetPy/mcp_output/diff_report.md +138 -0
  6. MetPy/mcp_output/mcp_plugin/__init__.py +0 -0
  7. MetPy/mcp_output/mcp_plugin/adapter.py +293 -0
  8. MetPy/mcp_output/mcp_plugin/main.py +13 -0
  9. MetPy/mcp_output/mcp_plugin/mcp_service.py +314 -0
  10. MetPy/mcp_output/requirements.txt +14 -0
  11. MetPy/mcp_output/start_mcp.py +30 -0
  12. MetPy/mcp_output/workflow_summary.json +225 -0
  13. MetPy/source/.DS_Store +0 -0
  14. MetPy/source/.codecov.yml +24 -0
  15. MetPy/source/.codespellexclude +19 -0
  16. MetPy/source/.codespellignore +1 -0
  17. MetPy/source/.coveragerc +10 -0
  18. MetPy/source/.devcontainer/Dockerfile +7 -0
  19. MetPy/source/.devcontainer/devcontainer.json +43 -0
  20. MetPy/source/.lgtm.yml +7 -0
  21. MetPy/source/.mailmap +26 -0
  22. MetPy/source/.markdownlint.yaml +6 -0
  23. MetPy/source/.mdl_style.rb +6 -0
  24. MetPy/source/.mdlrc +2 -0
  25. MetPy/source/.qlty/qlty.toml +17 -0
  26. MetPy/source/.stickler.yml +4 -0
  27. MetPy/source/AUTHORS.txt +52 -0
  28. MetPy/source/CITATION.cff +142 -0
  29. MetPy/source/CLA.md +217 -0
  30. MetPy/source/CODE_OF_CONDUCT.md +68 -0
  31. MetPy/source/CONTRIBUTING.md +412 -0
  32. MetPy/source/LICENSE +29 -0
  33. MetPy/source/MANIFEST.in +3 -0
  34. MetPy/source/README.md +118 -0
  35. MetPy/source/SUPPORT.md +38 -0
  36. MetPy/source/__init__.py +4 -0
  37. MetPy/source/benchmarks/Dockerfile +7 -0
  38. MetPy/source/benchmarks/Jenkinsfile +87 -0
  39. MetPy/source/benchmarks/asv.conf.json +208 -0
  40. MetPy/source/benchmarks/asv_run_script.sh +11 -0
  41. MetPy/source/benchmarks/benchmarks/__init__.py +4 -0
  42. MetPy/source/benchmarks/benchmarks/apparent_temp_benchmarks.py +66 -0
  43. MetPy/source/benchmarks/benchmarks/bound_layer_turbulence_benchmarks.py +70 -0
  44. MetPy/source/benchmarks/benchmarks/dry_thermo_benchmarks.py +117 -0
  45. MetPy/source/benchmarks/benchmarks/dyn_kin_benchmarks.py +159 -0
  46. MetPy/source/benchmarks/benchmarks/math_fctn_benchmarks.py +86 -0
  47. MetPy/source/benchmarks/benchmarks/moist_thermo_benchmarks.py +220 -0
  48. MetPy/source/benchmarks/benchmarks/other_benchmarks.py +83 -0
  49. MetPy/source/benchmarks/benchmarks/smoothing_benchmarks.py +77 -0
  50. MetPy/source/benchmarks/benchmarks/soundings_benchmarks.py +225 -0
.gitattributes CHANGED
@@ -33,3 +33,118 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ MetPy/source/docs/_static/FCM-R19-2003-WindchillReport.pdf filter=lfs diff=lfs merge=lfs -text
37
+ MetPy/source/docs/_static/fundam.pdf filter=lfs diff=lfs merge=lfs -text
38
+ MetPy/source/docs/_static/IAPWS95-2018.pdf filter=lfs diff=lfs merge=lfs -text
39
+ MetPy/source/docs/_static/Miller1972.pdf filter=lfs diff=lfs merge=lfs -text
40
+ MetPy/source/docs/_static/NSF-Unidata_lockup_horizontal_2024_darkmode_v3.png filter=lfs diff=lfs merge=lfs -text
41
+ MetPy/source/docs/_static/NSF-Unidata_lockup_horizontal_2024.png filter=lfs diff=lfs merge=lfs -text
42
+ MetPy/source/docs/_static/NWS_10-201.pdf filter=lfs diff=lfs merge=lfs -text
43
+ MetPy/source/docs/_static/Res_IAU2012_B2.pdf filter=lfs diff=lfs merge=lfs -text
44
+ MetPy/source/docs/_static/Smithsonian1951.pdf filter=lfs diff=lfs merge=lfs -text
45
+ MetPy/source/docs/_static/USAF_SkewT_manual.pdf filter=lfs diff=lfs merge=lfs -text
46
+ MetPy/source/staticdata/AK-REGIONAL_8km_3.9_20160408_1445.gini filter=lfs diff=lfs merge=lfs -text
47
+ MetPy/source/staticdata/CAM_test.nc filter=lfs diff=lfs merge=lfs -text
48
+ MetPy/source/staticdata/gem_conical.grd filter=lfs diff=lfs merge=lfs -text
49
+ MetPy/source/staticdata/gem_cylindrical.grd filter=lfs diff=lfs merge=lfs -text
50
+ MetPy/source/staticdata/gem_ship.sfc filter=lfs diff=lfs merge=lfs -text
51
+ MetPy/source/staticdata/gem_surface_with_text.sfc filter=lfs diff=lfs merge=lfs -text
52
+ MetPy/source/staticdata/gem_unmerged_with_text.snd filter=lfs diff=lfs merge=lfs -text
53
+ MetPy/source/staticdata/GFS_global.nc filter=lfs diff=lfs merge=lfs -text
54
+ MetPy/source/staticdata/gfs_output.nc filter=lfs diff=lfs merge=lfs -text
55
+ MetPy/source/staticdata/GFS_test.nc filter=lfs diff=lfs merge=lfs -text
56
+ MetPy/source/staticdata/HI-REGIONAL_4km_3.9_20160616_1715.gini filter=lfs diff=lfs merge=lfs -text
57
+ MetPy/source/staticdata/irma_gfs_example.nc filter=lfs diff=lfs merge=lfs -text
58
+ MetPy/source/staticdata/KICX_20170712_1458 filter=lfs diff=lfs merge=lfs -text
59
+ MetPy/source/staticdata/Level2_FOP1_20191223_003655.ar2v filter=lfs diff=lfs merge=lfs -text
60
+ MetPy/source/staticdata/Level2_KDDC_20200823_204121.ar2v filter=lfs diff=lfs merge=lfs -text
61
+ MetPy/source/staticdata/Level2_KFTG_20150430_1419.ar2v filter=lfs diff=lfs merge=lfs -text
62
+ MetPy/source/staticdata/Level2_KLBB_single_chunk filter=lfs diff=lfs merge=lfs -text
63
+ MetPy/source/staticdata/Level3_Composite_dhr_1km_20180309_2225.gini filter=lfs diff=lfs merge=lfs -text
64
+ MetPy/source/staticdata/NAM_test.nc filter=lfs diff=lfs merge=lfs -text
65
+ MetPy/source/staticdata/narr_example.nc filter=lfs diff=lfs merge=lfs -text
66
+ MetPy/source/staticdata/NHEM-MULTICOMP_1km_IR_20151208_2100.gini filter=lfs diff=lfs merge=lfs -text
67
+ MetPy/source/staticdata/nids/KLZK_H0C_20200814_0417 filter=lfs diff=lfs merge=lfs -text
68
+ MetPy/source/staticdata/nids/KLZK_H0V_20200812_1309 filter=lfs diff=lfs merge=lfs -text
69
+ MetPy/source/staticdata/nids/KLZK_H0W_20200812_1305 filter=lfs diff=lfs merge=lfs -text
70
+ MetPy/source/staticdata/nids/KLZK_H0Z_20200812_1318 filter=lfs diff=lfs merge=lfs -text
71
+ MetPy/source/staticdata/nids/KRAX_DTA_20200818_0454.nids filter=lfs diff=lfs merge=lfs -text
72
+ MetPy/source/staticdata/nids/Level3_DEN_TZ0_20200804_2226.nids filter=lfs diff=lfs merge=lfs -text
73
+ MetPy/source/staticdata/nids/Level3_DEN_TZ1_20200804_2226.nids filter=lfs diff=lfs merge=lfs -text
74
+ MetPy/source/staticdata/nids/Level3_FTG_N0B_20220304_1820.nids filter=lfs diff=lfs merge=lfs -text
75
+ MetPy/source/staticdata/nids/Level3_MCI_TZL_20160526_2154.nids filter=lfs diff=lfs merge=lfs -text
76
+ MetPy/source/staticdata/PR-NATIONAL_1km_PCT_20200320_0446.gini filter=lfs diff=lfs merge=lfs -text
77
+ MetPy/source/staticdata/sfc_obs.gem filter=lfs diff=lfs merge=lfs -text
78
+ MetPy/source/staticdata/us_counties_20m.dbf filter=lfs diff=lfs merge=lfs -text
79
+ MetPy/source/staticdata/us_counties_20m.shp filter=lfs diff=lfs merge=lfs -text
80
+ MetPy/source/staticdata/us_counties_500k.dbf filter=lfs diff=lfs merge=lfs -text
81
+ MetPy/source/staticdata/us_counties_500k.shp filter=lfs diff=lfs merge=lfs -text
82
+ MetPy/source/staticdata/us_counties_5m.dbf filter=lfs diff=lfs merge=lfs -text
83
+ MetPy/source/staticdata/us_counties_5m.shp filter=lfs diff=lfs merge=lfs -text
84
+ MetPy/source/staticdata/us_states_20m.shp filter=lfs diff=lfs merge=lfs -text
85
+ MetPy/source/staticdata/us_states_500k.shp filter=lfs diff=lfs merge=lfs -text
86
+ MetPy/source/staticdata/us_states_5m.shp filter=lfs diff=lfs merge=lfs -text
87
+ MetPy/source/staticdata/WEST-CONUS_4km_WV_20151208_2200.gini filter=lfs diff=lfs merge=lfs -text
88
+ MetPy/source/staticdata/wrf_example.nc filter=lfs diff=lfs merge=lfs -text
89
+ MetPy/source/talks/MetPy[[:space:]]Infrastructure[[:space:]]-[[:space:]]SciPy[[:space:]]2016.pdf filter=lfs diff=lfs merge=lfs -text
90
+ MetPy/source/talks/MetPy[[:space:]]Presentation[[:space:]]-[[:space:]]AMS[[:space:]]2016.pdf filter=lfs diff=lfs merge=lfs -text
91
+ MetPy/source/tests/plots/baseline/test_colorfill_no_colorbar.png filter=lfs diff=lfs merge=lfs -text
92
+ MetPy/source/tests/plots/baseline/test_colorfill_with_image_range.png filter=lfs diff=lfs merge=lfs -text
93
+ MetPy/source/tests/plots/baseline/test_declarative_additional_layers_plot_options.png filter=lfs diff=lfs merge=lfs -text
94
+ MetPy/source/tests/plots/baseline/test_declarative_arrow_changes.png filter=lfs diff=lfs merge=lfs -text
95
+ MetPy/source/tests/plots/baseline/test_declarative_arrowkey.png filter=lfs diff=lfs merge=lfs -text
96
+ MetPy/source/tests/plots/baseline/test_declarative_arrowplot.png filter=lfs diff=lfs merge=lfs -text
97
+ MetPy/source/tests/plots/baseline/test_declarative_barb_earth_relative.png filter=lfs diff=lfs merge=lfs -text
98
+ MetPy/source/tests/plots/baseline/test_declarative_barb_gfs_knots.png filter=lfs diff=lfs merge=lfs -text
99
+ MetPy/source/tests/plots/baseline/test_declarative_barb_gfs.png filter=lfs diff=lfs merge=lfs -text
100
+ MetPy/source/tests/plots/baseline/test_declarative_barb_options.png filter=lfs diff=lfs merge=lfs -text
101
+ MetPy/source/tests/plots/baseline/test_declarative_barb_scale.png filter=lfs diff=lfs merge=lfs -text
102
+ MetPy/source/tests/plots/baseline/test_declarative_colorbar_fontsize.png filter=lfs diff=lfs merge=lfs -text
103
+ MetPy/source/tests/plots/baseline/test_declarative_contour_cam.png filter=lfs diff=lfs merge=lfs -text
104
+ MetPy/source/tests/plots/baseline/test_declarative_contour_convert_units.png filter=lfs diff=lfs merge=lfs -text
105
+ MetPy/source/tests/plots/baseline/test_declarative_contour_label_fontsize.png filter=lfs diff=lfs merge=lfs -text
106
+ MetPy/source/tests/plots/baseline/test_declarative_contour_options.png filter=lfs diff=lfs merge=lfs -text
107
+ MetPy/source/tests/plots/baseline/test_declarative_contour.png filter=lfs diff=lfs merge=lfs -text
108
+ MetPy/source/tests/plots/baseline/test_declarative_events.png filter=lfs diff=lfs merge=lfs -text
109
+ MetPy/source/tests/plots/baseline/test_declarative_figsize.png filter=lfs diff=lfs merge=lfs -text
110
+ MetPy/source/tests/plots/baseline/test_declarative_global_gfs.png filter=lfs diff=lfs merge=lfs -text
111
+ MetPy/source/tests/plots/baseline/test_declarative_gridded_scale.png filter=lfs diff=lfs merge=lfs -text
112
+ MetPy/source/tests/plots/baseline/test_declarative_image.png filter=lfs diff=lfs merge=lfs -text
113
+ MetPy/source/tests/plots/baseline/test_declarative_layers_plot_options.png filter=lfs diff=lfs merge=lfs -text
114
+ MetPy/source/tests/plots/baseline/test_declarative_multiple_sfc_obs_change_units.png filter=lfs diff=lfs merge=lfs -text
115
+ MetPy/source/tests/plots/baseline/test_declarative_overlay_projections.png filter=lfs diff=lfs merge=lfs -text
116
+ MetPy/source/tests/plots/baseline/test_declarative_plot_geometry_points.png filter=lfs diff=lfs merge=lfs -text
117
+ MetPy/source/tests/plots/baseline/test_declarative_plot_geometry_polygons.png filter=lfs diff=lfs merge=lfs -text
118
+ MetPy/source/tests/plots/baseline/test_declarative_plot_surface_analysis_custom.png filter=lfs diff=lfs merge=lfs -text
119
+ MetPy/source/tests/plots/baseline/test_declarative_plot_surface_analysis_default.png filter=lfs diff=lfs merge=lfs -text
120
+ MetPy/source/tests/plots/baseline/test_declarative_raster_options.png filter=lfs diff=lfs merge=lfs -text
121
+ MetPy/source/tests/plots/baseline/test_declarative_raster.png filter=lfs diff=lfs merge=lfs -text
122
+ MetPy/source/tests/plots/baseline/test_declarative_region_modifier_zoom_out.png filter=lfs diff=lfs merge=lfs -text
123
+ MetPy/source/tests/plots/baseline/test_declarative_sfc_obs_args.png filter=lfs diff=lfs merge=lfs -text
124
+ MetPy/source/tests/plots/baseline/test_declarative_sfc_obs_change_units.png filter=lfs diff=lfs merge=lfs -text
125
+ MetPy/source/tests/plots/baseline/test_declarative_sfc_obs_changes.png filter=lfs diff=lfs merge=lfs -text
126
+ MetPy/source/tests/plots/baseline/test_declarative_sfc_obs_full.png filter=lfs diff=lfs merge=lfs -text
127
+ MetPy/source/tests/plots/baseline/test_declarative_sfc_obs.png filter=lfs diff=lfs merge=lfs -text
128
+ MetPy/source/tests/plots/baseline/test_declarative_sfc_text.png filter=lfs diff=lfs merge=lfs -text
129
+ MetPy/source/tests/plots/baseline/test_declarative_smooth_contour_calculation.png filter=lfs diff=lfs merge=lfs -text
130
+ MetPy/source/tests/plots/baseline/test_declarative_smooth_contour_order.png filter=lfs diff=lfs merge=lfs -text
131
+ MetPy/source/tests/plots/baseline/test_declarative_smooth_contour.png filter=lfs diff=lfs merge=lfs -text
132
+ MetPy/source/tests/plots/baseline/test_declarative_smooth_field.png filter=lfs diff=lfs merge=lfs -text
133
+ MetPy/source/tests/plots/baseline/test_declarative_station_plot_fontsize.png filter=lfs diff=lfs merge=lfs -text
134
+ MetPy/source/tests/plots/baseline/test_declarative_title_fontsize.png filter=lfs diff=lfs merge=lfs -text
135
+ MetPy/source/tests/plots/baseline/test_declarative_titles.png filter=lfs diff=lfs merge=lfs -text
136
+ MetPy/source/tests/plots/baseline/test_declarative_upa_obs_convert_barb_units.png filter=lfs diff=lfs merge=lfs -text
137
+ MetPy/source/tests/plots/baseline/test_declarative_upa_obs.png filter=lfs diff=lfs merge=lfs -text
138
+ MetPy/source/tests/plots/baseline/test_emagram_default_aspect_empty.png filter=lfs diff=lfs merge=lfs -text
139
+ MetPy/source/tests/plots/baseline/test_emagram_mixing_line_args.png filter=lfs diff=lfs merge=lfs -text
140
+ MetPy/source/tests/plots/baseline/test_hodograph_api.png filter=lfs diff=lfs merge=lfs -text
141
+ MetPy/source/tests/plots/baseline/test_latlon.png filter=lfs diff=lfs merge=lfs -text
142
+ MetPy/source/tests/plots/baseline/test_projection_object.png filter=lfs diff=lfs merge=lfs -text
143
+ MetPy/source/tests/plots/baseline/test_scalloped_stroke_closed.png filter=lfs diff=lfs merge=lfs -text
144
+ MetPy/source/tests/plots/baseline/test_scalloped_stroke_segment.png filter=lfs diff=lfs merge=lfs -text
145
+ MetPy/source/tests/plots/baseline/test_skewt_api_units.png filter=lfs diff=lfs merge=lfs -text
146
+ MetPy/source/tests/plots/baseline/test_skewt_api.png filter=lfs diff=lfs merge=lfs -text
147
+ MetPy/source/tests/plots/baseline/test_skewt_default_aspect_empty.png filter=lfs diff=lfs merge=lfs -text
148
+ MetPy/source/tests/plots/baseline/test_skewt_mixing_line_args.png filter=lfs diff=lfs merge=lfs -text
149
+ MetPy/source/tests/plots/baseline/test_stuve_default_aspect_empty.png filter=lfs diff=lfs merge=lfs -text
150
+ MetPy/source/tests/plots/baseline/test_stuve_mixing_line_args.png filter=lfs diff=lfs merge=lfs -text
Dockerfile ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.10
2
+
3
+ RUN useradd -m -u 1000 user && python -m pip install --upgrade pip
4
+ USER user
5
+ ENV PATH="/home/user/.local/bin:$PATH"
6
+
7
+ WORKDIR /app
8
+
9
+ COPY --chown=user ./requirements.txt requirements.txt
10
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
11
+
12
+ COPY --chown=user . /app
13
+ ENV MCP_TRANSPORT=http
14
+ ENV MCP_PORT=7860
15
+
16
+ EXPOSE 7860
17
+
18
+ CMD ["python", "MetPy/mcp_output/start_mcp.py"]
MetPy/mcp_output/README_MCP.md ADDED
@@ -0,0 +1,151 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # MetPy MCP (Model Context Protocol) Service README
2
+
3
+ ## 1) Project Introduction
4
+
5
+ This MCP (Model Context Protocol) service wraps core capabilities from [MetPy](https://github.com/Unidata/MetPy) to provide meteorological computation and data-processing tools for LLM-driven workflows.
6
+
7
+ Primary service functions:
8
+ - Thermodynamic diagnostics (e.g., LCL/LFC/EL, CAPE/CIN, mixing ratio, equivalent potential temperature)
9
+ - Kinematic diagnostics (e.g., vorticity, divergence, advection, deformation, frontogenesis)
10
+ - Basic wind/temperature transforms (wind speed/direction/components, potential temperature)
11
+ - Forecast/stability indices (e.g., Showalter, K, Lifted, Total Totals, bulk shear, SRH)
12
+ - Station/radar ingestion (METAR parsing, NEXRAD Level II/III readers)
13
+ - Interpolation/gridding (station-to-grid workflows)
14
+ - xarray + units integration via Pint for safer scientific calculations
15
+
16
+ ---
17
+
18
+ ## 2) Installation Method
19
+
20
+ ### Requirements
21
+ Core dependencies:
22
+ - numpy
23
+ - scipy
24
+ - pint
25
+ - packaging
26
+
27
+ Common optional dependencies (enable richer workflows):
28
+ - xarray, pandas
29
+ - matplotlib, cartopy
30
+ - pyproj
31
+ - netCDF4
32
+ - pooch
33
+ - siphon
34
+
35
+ ### Install
36
+ - Install from PyPI:
37
+ pip install metpy
38
+
39
+ - Recommended extras for full scientific workflows:
40
+ pip install metpy xarray pandas matplotlib cartopy pyproj netCDF4 pooch siphon
41
+
42
+ - For MCP (Model Context Protocol) service development:
43
+ pip install -e .
44
+
45
+ ---
46
+
47
+ ## 3) Quick Start
48
+
49
+ ### Basic usage flow
50
+ 1. Load data (arrays, xarray objects, or text/radar files)
51
+ 2. Attach/use units (Pint via `metpy.units`)
52
+ 3. Run calculation endpoints
53
+ 4. Return structured numeric outputs (and optional metadata)
54
+
55
+ ### Example calls (service-level intent)
56
+ - Thermo:
57
+ - `lcl`, `lfc`, `el`, `cape_cin`
58
+ - `mixing_ratio`, `dewpoint_from_relative_humidity`, `virtual_temperature`
59
+ - Kinematics:
60
+ - `vorticity`, `divergence`, `advection`, `absolute_vorticity`, `frontogenesis`
61
+ - Basic:
62
+ - `wind_speed`, `wind_direction`, `wind_components`, `potential_temperature`
63
+ - IO:
64
+ - `parse_metar_file`
65
+ - `Level2File`, `Level3File` readers
66
+ - Interpolation:
67
+ - `interpolate_to_grid`
68
+
69
+ ---
70
+
71
+ ## 4) Available Tools and Endpoints List
72
+
73
+ Suggested MCP (Model Context Protocol) endpoint layout for this service:
74
+
75
+ - `calc.thermo.parcel_profile`
76
+ Parcel temperature profile computation.
77
+
78
+ - `calc.thermo.lcl` / `calc.thermo.lfc` / `calc.thermo.el`
79
+ Key parcel-level heights/pressures for convective diagnosis.
80
+
81
+ - `calc.thermo.cape_cin`
82
+ Convective available potential energy / inhibition.
83
+
84
+ - `calc.thermo.mixing_ratio`
85
+ Moisture ratio diagnostics.
86
+
87
+ - `calc.thermo.dewpoint_from_relative_humidity`
88
+ Dewpoint from RH and temperature.
89
+
90
+ - `calc.thermo.equivalent_potential_temperature`
91
+ Theta-e for thermodynamic state analysis.
92
+
93
+ - `calc.kinematics.vorticity` / `divergence` / `advection`
94
+ Core flow and derivative diagnostics.
95
+
96
+ - `calc.kinematics.frontogenesis` / `absolute_vorticity` / `q_vector`
97
+ Synoptic and mesoscale diagnostics.
98
+
99
+ - `calc.kinematics.shearing_deformation` / `stretching_deformation` / `total_deformation`
100
+ Deformation field analysis.
101
+
102
+ - `calc.basic.wind_speed` / `wind_direction` / `wind_components`
103
+ Wind vector conversions.
104
+
105
+ - `calc.basic.potential_temperature`
106
+ Thermodynamic transform.
107
+
108
+ - `calc.indices.showalter_index` / `k_index` / `lifted_index` / `total_totals_index`
109
+ Stability index suite.
110
+
111
+ - `calc.indices.bulk_shear` / `storm_relative_helicity`
112
+ Severe-weather kinematic predictors.
113
+
114
+ - `interpolate.grid.interpolate_to_grid`
115
+ Station/objective analysis to grid.
116
+
117
+ - `io.metar.parse_metar_file`
118
+ METAR text ingestion and parsing.
119
+
120
+ - `io.nexrad.Level2File` / `io.nexrad.Level3File`
121
+ NEXRAD binary product readers.
122
+
123
+ - `xarray.preprocess_and_wrap` / `xarray.grid_deltas_from_dataarray`
124
+ Coordinate-aware xarray workflows.
125
+
126
+ ---
127
+
128
+ ## 5) Common Issues and Notes
129
+
130
+ - Units are mandatory for reliable science:
131
+ - Use Pint quantities consistently to avoid silent unit errors.
132
+ - Optional stack matters:
133
+ - Plotting/raster/map workflows need matplotlib/cartopy/pyproj.
134
+ - Radar and IO pipelines may be memory-heavy:
135
+ - For large NEXRAD files, stream/process in chunks where possible.
136
+ - xarray interoperability:
137
+ - Prefer coordinate-aware arrays for derivative operations (dx/dy correctness).
138
+ - No native CLI detected:
139
+ - This repository is primarily a Python library; expose MCP (Model Context Protocol) endpoints at the service layer.
140
+ - Environment recommendations:
141
+ - Use a dedicated virtual environment/conda env to avoid binary dependency conflicts.
142
+
143
+ ---
144
+
145
+ ## 6) Reference Links / Documentation
146
+
147
+ - Repository: https://github.com/Unidata/MetPy
148
+ - Official docs: https://unidata.github.io/MetPy/
149
+ - Project README: https://github.com/Unidata/MetPy/blob/main/README.md
150
+ - Contributing guide: https://github.com/Unidata/MetPy/blob/main/CONTRIBUTING.md
151
+ - Support: https://github.com/Unidata/MetPy/blob/main/SUPPORT.md
MetPy/mcp_output/analysis.json ADDED
@@ -0,0 +1,996 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "summary": {
3
+ "repository_url": "https://github.com/Unidata/MetPy",
4
+ "summary": "Imported via zip fallback, file count: 262",
5
+ "file_tree": {
6
+ ".codecov.yml": {
7
+ "size": 407
8
+ },
9
+ ".devcontainer/devcontainer.json": {
10
+ "size": 1589
11
+ },
12
+ ".github/ISSUE_TEMPLATE/bug_report.yml": {
13
+ "size": 2323
14
+ },
15
+ ".github/ISSUE_TEMPLATE/config.yml": {
16
+ "size": 237
17
+ },
18
+ ".github/ISSUE_TEMPLATE/documentation.yml": {
19
+ "size": 382
20
+ },
21
+ ".github/ISSUE_TEMPLATE/feature_request.yml": {
22
+ "size": 541
23
+ },
24
+ ".github/actions/build-docs/action.yml": {
25
+ "size": 2322
26
+ },
27
+ ".github/actions/install-conda/action.yml": {
28
+ "size": 1387
29
+ },
30
+ ".github/actions/install-pypi/action.yml": {
31
+ "size": 2622
32
+ },
33
+ ".github/actions/run-tests/action.yml": {
34
+ "size": 1836
35
+ },
36
+ ".github/codeql/codeql-config.yml": {
37
+ "size": 139
38
+ },
39
+ ".github/dependabot.yml": {
40
+ "size": 1759
41
+ },
42
+ ".github/pull_request_template.md": {
43
+ "size": 605
44
+ },
45
+ ".github/release.yml": {
46
+ "size": 443
47
+ },
48
+ ".github/workflows/assign-milestone.yml": {
49
+ "size": 1683
50
+ },
51
+ ".github/workflows/automerge-dependabot.yml": {
52
+ "size": 1127
53
+ },
54
+ ".github/workflows/backport-prs.yml": {
55
+ "size": 3850
56
+ },
57
+ ".github/workflows/benchmark-pr.yml": {
58
+ "size": 1561
59
+ },
60
+ ".github/workflows/cffcheck.yml": {
61
+ "size": 560
62
+ },
63
+ ".github/workflows/code-analysis.yml": {
64
+ "size": 846
65
+ },
66
+ ".github/workflows/docs-conda.yml": {
67
+ "size": 1177
68
+ },
69
+ ".github/workflows/docs.yml": {
70
+ "size": 4361
71
+ },
72
+ ".github/workflows/linting.yml": {
73
+ "size": 1943
74
+ },
75
+ ".github/workflows/nightly-builds.yml": {
76
+ "size": 3315
77
+ },
78
+ ".github/workflows/release.yml": {
79
+ "size": 1359
80
+ },
81
+ ".github/workflows/run-unstable-pr.yml": {
82
+ "size": 362
83
+ },
84
+ ".github/workflows/tests-conda.yml": {
85
+ "size": 1877
86
+ },
87
+ ".github/workflows/tests-pypi.yml": {
88
+ "size": 3192
89
+ },
90
+ ".github/workflows/unstable-builds.yml": {
91
+ "size": 2417
92
+ },
93
+ ".lgtm.yml": {
94
+ "size": 171
95
+ },
96
+ ".markdownlint.yaml": {
97
+ "size": 55
98
+ },
99
+ ".qlty/qlty.toml": {
100
+ "size": 282
101
+ },
102
+ ".stickler.yml": {
103
+ "size": 72
104
+ },
105
+ "AUTHORS.txt": {
106
+ "size": 742
107
+ },
108
+ "CLA.md": {
109
+ "size": 11868
110
+ },
111
+ "CODE_OF_CONDUCT.md": {
112
+ "size": 3230
113
+ },
114
+ "CONTRIBUTING.md": {
115
+ "size": 18912
116
+ },
117
+ "README.md": {
118
+ "size": 6298
119
+ },
120
+ "SUPPORT.md": {
121
+ "size": 1847
122
+ },
123
+ "benchmarks/asv.conf.json": {
124
+ "size": 8429
125
+ },
126
+ "benchmarks/benchmarks/__init__.py": {
127
+ "size": 189
128
+ },
129
+ "benchmarks/benchmarks/apparent_temp_benchmarks.py": {
130
+ "size": 2270
131
+ },
132
+ "benchmarks/benchmarks/bound_layer_turbulence_benchmarks.py": {
133
+ "size": 2473
134
+ },
135
+ "benchmarks/benchmarks/dry_thermo_benchmarks.py": {
136
+ "size": 5049
137
+ },
138
+ "benchmarks/benchmarks/dyn_kin_benchmarks.py": {
139
+ "size": 7030
140
+ },
141
+ "benchmarks/benchmarks/math_fctn_benchmarks.py": {
142
+ "size": 3217
143
+ },
144
+ "benchmarks/benchmarks/moist_thermo_benchmarks.py": {
145
+ "size": 11552
146
+ },
147
+ "benchmarks/benchmarks/other_benchmarks.py": {
148
+ "size": 3104
149
+ },
150
+ "benchmarks/benchmarks/smoothing_benchmarks.py": {
151
+ "size": 2701
152
+ },
153
+ "benchmarks/benchmarks/soundings_benchmarks.py": {
154
+ "size": 11218
155
+ },
156
+ "benchmarks/benchmarks/std_atm_benchmarks.py": {
157
+ "size": 2389
158
+ },
159
+ "benchmarks/data_array_generate.py": {
160
+ "size": 10497
161
+ },
162
+ "ci-dev/doc_requirements.txt": {
163
+ "size": 122
164
+ },
165
+ "ci-dev/linting_requirements.txt": {
166
+ "size": 254
167
+ },
168
+ "ci-dev/test_requirements.txt": {
169
+ "size": 79
170
+ },
171
+ "ci/download_cartopy_maps.py": {
172
+ "size": 1059
173
+ },
174
+ "ci/extra_requirements.txt": {
175
+ "size": 75
176
+ },
177
+ "ci/filter_links.py": {
178
+ "size": 1577
179
+ },
180
+ "ci/requirements.txt": {
181
+ "size": 136
182
+ },
183
+ "conftest.py": {
184
+ "size": 6621
185
+ },
186
+ "docs/_static/doc_shared.js": {
187
+ "size": 2554
188
+ },
189
+ "docs/conf.py": {
190
+ "size": 18700
191
+ },
192
+ "docs/devel/CONTRIBUTING.md": {
193
+ "size": 21
194
+ },
195
+ "docs/doc-server.py": {
196
+ "size": 1298
197
+ },
198
+ "docs/make_areas.py": {
199
+ "size": 4350
200
+ },
201
+ "docs/override_check.py": {
202
+ "size": 1137
203
+ },
204
+ "docs/test-server/pst-versions.json": {
205
+ "size": 720
206
+ },
207
+ "docs/userguide/SUPPORT.md": {
208
+ "size": 16
209
+ },
210
+ "examples/Advanced_Sounding.py": {
211
+ "size": 3280
212
+ },
213
+ "examples/Advanced_Sounding_With_Complex_Layout.py": {
214
+ "size": 15252
215
+ },
216
+ "examples/Four_Panel_Map.py": {
217
+ "size": 4745
218
+ },
219
+ "examples/README.txt": {
220
+ "size": 309
221
+ },
222
+ "examples/XArray_Projections.py": {
223
+ "size": 1171
224
+ },
225
+ "examples/calculations/Absolute_Vorticity.py": {
226
+ "size": 1036
227
+ },
228
+ "examples/calculations/Advection.py": {
229
+ "size": 1439
230
+ },
231
+ "examples/calculations/Angle_to_Direction.py": {
232
+ "size": 1305
233
+ },
234
+ "examples/calculations/Bulk_Shear.py": {
235
+ "size": 1807
236
+ },
237
+ "examples/calculations/Dewpoint_and_Mixing_Ratio.py": {
238
+ "size": 1705
239
+ },
240
+ "examples/calculations/Divergence.py": {
241
+ "size": 984
242
+ },
243
+ "examples/calculations/Equivalent_Potential_Temperature.py": {
244
+ "size": 2183
245
+ },
246
+ "examples/calculations/Gradient.py": {
247
+ "size": 2073
248
+ },
249
+ "examples/calculations/High_Low_Analysis.py": {
250
+ "size": 2632
251
+ },
252
+ "examples/calculations/Mean_Pressure_Weighted.py": {
253
+ "size": 1665
254
+ },
255
+ "examples/calculations/Mountain_Problem.py": {
256
+ "size": 2747
257
+ },
258
+ "examples/calculations/Parse_Angles.py": {
259
+ "size": 1063
260
+ },
261
+ "examples/calculations/QVector.py": {
262
+ "size": 2090
263
+ },
264
+ "examples/calculations/README.txt": {
265
+ "size": 104
266
+ },
267
+ "examples/calculations/Shearing_Deformation.py": {
268
+ "size": 1058
269
+ },
270
+ "examples/calculations/Smoothing.py": {
271
+ "size": 2435
272
+ },
273
+ "examples/calculations/Sounding_Calculations.py": {
274
+ "size": 8205
275
+ },
276
+ "examples/calculations/Static_Stability.py": {
277
+ "size": 1599
278
+ },
279
+ "examples/calculations/Stretching_Deformation.py": {
280
+ "size": 1076
281
+ },
282
+ "examples/calculations/Thickness_Hydrostatic.py": {
283
+ "size": 2069
284
+ },
285
+ "examples/calculations/Total_Deformation.py": {
286
+ "size": 1040
287
+ },
288
+ "examples/calculations/Vorticity.py": {
289
+ "size": 993
290
+ },
291
+ "examples/calculations/Wind_Speed.py": {
292
+ "size": 959
293
+ },
294
+ "examples/cross_section.py": {
295
+ "size": 5049
296
+ },
297
+ "examples/formats/GINI_Water_Vapor.py": {
298
+ "size": 1710
299
+ },
300
+ "examples/formats/NEXRAD_Level_2_File.py": {
301
+ "size": 3424
302
+ },
303
+ "examples/formats/NEXRAD_Level_3_File.py": {
304
+ "size": 2282
305
+ },
306
+ "examples/formats/README.txt": {
307
+ "size": 115
308
+ },
309
+ "examples/gridding/Find_Natural_Neighbors_Verification.py": {
310
+ "size": 2557
311
+ },
312
+ "examples/gridding/Inverse_Distance_Verification.py": {
313
+ "size": 7102
314
+ },
315
+ "examples/gridding/Natural_Neighbor_Verification.py": {
316
+ "size": 10380
317
+ },
318
+ "examples/gridding/Point_Interpolation.py": {
319
+ "size": 5119
320
+ },
321
+ "examples/gridding/README.txt": {
322
+ "size": 113
323
+ },
324
+ "examples/gridding/Wind_SLP_Interpolation.py": {
325
+ "size": 4475
326
+ },
327
+ "examples/isentropic_example.py": {
328
+ "size": 7367
329
+ },
330
+ "examples/meteogram_metpy.py": {
331
+ "size": 9027
332
+ },
333
+ "examples/plots/Combined_plotting.py": {
334
+ "size": 1336
335
+ },
336
+ "examples/plots/Hodograph_Inset.py": {
337
+ "size": 2383
338
+ },
339
+ "examples/plots/Mesonet_Stationplot.py": {
340
+ "size": 4217
341
+ },
342
+ "examples/plots/Plotting_Surface_Analysis.py": {
343
+ "size": 3423
344
+ },
345
+ "examples/plots/README.txt": {
346
+ "size": 103
347
+ },
348
+ "examples/plots/Simple_Fronts_Plot.py": {
349
+ "size": 1459
350
+ },
351
+ "examples/plots/Simple_Sounding.py": {
352
+ "size": 3184
353
+ },
354
+ "examples/plots/Simplified_Image_Plot.py": {
355
+ "size": 727
356
+ },
357
+ "examples/plots/Skew-T_Layout.py": {
358
+ "size": 2399
359
+ },
360
+ "examples/plots/Sounding_LCL_Dataset.py": {
361
+ "size": 3080
362
+ },
363
+ "examples/plots/Station_Plot.py": {
364
+ "size": 4577
365
+ },
366
+ "examples/plots/Station_Plot_with_Layout.py": {
367
+ "size": 8152
368
+ },
369
+ "examples/plots/US_Counties.py": {
370
+ "size": 850
371
+ },
372
+ "examples/plots/nhc_wind_probabilities.py": {
373
+ "size": 4274
374
+ },
375
+ "examples/plots/raster_declarative.py": {
376
+ "size": 1426
377
+ },
378
+ "examples/plots/spc_convective_outlook.py": {
379
+ "size": 1777
380
+ },
381
+ "examples/plots/surface_declarative.py": {
382
+ "size": 2181
383
+ },
384
+ "examples/plots/upperair_declarative.py": {
385
+ "size": 2177
386
+ },
387
+ "examples/remote/README.txt": {
388
+ "size": 159
389
+ },
390
+ "examples/remote/basic.py": {
391
+ "size": 1061
392
+ },
393
+ "examples/remote/ml_forecast.py": {
394
+ "size": 1199
395
+ },
396
+ "examples/sigma_to_pressure_interpolation.py": {
397
+ "size": 3493
398
+ },
399
+ "pyproject.toml": {
400
+ "size": 6144
401
+ },
402
+ "setup.cfg": {
403
+ "size": 1049
404
+ },
405
+ "src/metpy/__init__.py": {
406
+ "size": 525
407
+ },
408
+ "src/metpy/_vendor/__init__.py": {
409
+ "size": 189
410
+ },
411
+ "src/metpy/_vendor/xarray.py": {
412
+ "size": 2816
413
+ },
414
+ "src/metpy/_version.py": {
415
+ "size": 1332
416
+ },
417
+ "src/metpy/_warnings.py": {
418
+ "size": 823
419
+ },
420
+ "src/metpy/calc/__init__.py": {
421
+ "size": 1085
422
+ },
423
+ "src/metpy/calc/basic.py": {
424
+ "size": 46972
425
+ },
426
+ "src/metpy/calc/cross_sections.py": {
427
+ "size": 10773
428
+ },
429
+ "src/metpy/calc/exceptions.py": {
430
+ "size": 325
431
+ },
432
+ "src/metpy/calc/indices.py": {
433
+ "size": 26492
434
+ },
435
+ "src/metpy/calc/kinematics.py": {
436
+ "size": 73179
437
+ },
438
+ "src/metpy/calc/thermo.py": {
439
+ "size": 198357
440
+ },
441
+ "src/metpy/calc/tools.py": {
442
+ "size": 82702
443
+ },
444
+ "src/metpy/calc/turbulence.py": {
445
+ "size": 7608
446
+ },
447
+ "src/metpy/cbook.py": {
448
+ "size": 5854
449
+ },
450
+ "src/metpy/constants/__init__.py": {
451
+ "size": 6851
452
+ },
453
+ "src/metpy/constants/default.py": {
454
+ "size": 2978
455
+ },
456
+ "src/metpy/constants/nounit.py": {
457
+ "size": 872
458
+ },
459
+ "src/metpy/deprecation.py": {
460
+ "size": 11148
461
+ },
462
+ "src/metpy/future.py": {
463
+ "size": 285
464
+ },
465
+ "src/metpy/interpolate/__init__.py": {
466
+ "size": 778
467
+ },
468
+ "src/metpy/interpolate/geometry.py": {
469
+ "size": 10454
470
+ },
471
+ "src/metpy/interpolate/grid.py": {
472
+ "size": 12842
473
+ },
474
+ "src/metpy/interpolate/one_dimension.py": {
475
+ "size": 7900
476
+ },
477
+ "src/metpy/interpolate/points.py": {
478
+ "size": 13724
479
+ },
480
+ "src/metpy/interpolate/slices.py": {
481
+ "size": 6754
482
+ },
483
+ "src/metpy/interpolate/tools.py": {
484
+ "size": 4458
485
+ },
486
+ "src/metpy/io/__init__.py": {
487
+ "size": 1305
488
+ },
489
+ "src/metpy/io/_metar_parser.py": {
490
+ "size": 228959
491
+ },
492
+ "src/metpy/io/_nexrad_msgs/__init__.py": {
493
+ "size": 203
494
+ },
495
+ "src/metpy/io/_nexrad_msgs/msg18.py": {
496
+ "size": 43236
497
+ },
498
+ "src/metpy/io/_nexrad_msgs/msg3.py": {
499
+ "size": 18946
500
+ },
501
+ "src/metpy/io/_tools.py": {
502
+ "size": 12319
503
+ },
504
+ "src/metpy/io/gempak.py": {
505
+ "size": 114036
506
+ },
507
+ "src/metpy/io/gini.py": {
508
+ "size": 18521
509
+ },
510
+ "src/metpy/io/metar.py": {
511
+ "size": 20075
512
+ },
513
+ "src/metpy/io/nexrad.py": {
514
+ "size": 119894
515
+ },
516
+ "src/metpy/io/station_data.py": {
517
+ "size": 7650
518
+ },
519
+ "src/metpy/io/text.py": {
520
+ "size": 5899
521
+ },
522
+ "src/metpy/package_tools.py": {
523
+ "size": 2178
524
+ },
525
+ "src/metpy/pandas.py": {
526
+ "size": 826
527
+ },
528
+ "src/metpy/plots/__init__.py": {
529
+ "size": 1851
530
+ },
531
+ "src/metpy/plots/_util.py": {
532
+ "size": 9186
533
+ },
534
+ "src/metpy/plots/cartopy_utils.py": {
535
+ "size": 2810
536
+ },
537
+ "src/metpy/plots/ctables.py": {
538
+ "size": 8716
539
+ },
540
+ "src/metpy/plots/declarative.py": {
541
+ "size": 93828
542
+ },
543
+ "src/metpy/plots/fonts/wx_symbols_license.txt": {
544
+ "size": 328
545
+ },
546
+ "src/metpy/plots/mapping.py": {
547
+ "size": 8548
548
+ },
549
+ "src/metpy/plots/patheffects.py": {
550
+ "size": 43589
551
+ },
552
+ "src/metpy/plots/plot_areas.py": {
553
+ "size": 39092
554
+ },
555
+ "src/metpy/plots/skewt.py": {
556
+ "size": 41279
557
+ },
558
+ "src/metpy/plots/station_plot.py": {
559
+ "size": 27950
560
+ },
561
+ "src/metpy/plots/text.py": {
562
+ "size": 9211
563
+ },
564
+ "src/metpy/plots/wx_symbols.py": {
565
+ "size": 10744
566
+ },
567
+ "src/metpy/remote/__init__.py": {
568
+ "size": 454
569
+ },
570
+ "src/metpy/remote/aws.py": {
571
+ "size": 27436
572
+ },
573
+ "src/metpy/static-data-manifest.txt": {
574
+ "size": 24858
575
+ },
576
+ "src/metpy/testing.py": {
577
+ "size": 11806
578
+ },
579
+ "src/metpy/units.py": {
580
+ "size": 15097
581
+ },
582
+ "src/metpy/xarray.py": {
583
+ "size": 65473
584
+ },
585
+ "staticdata/20110522_OUN_12Z.txt": {
586
+ "size": 5900
587
+ },
588
+ "staticdata/WPC_sfc_fronts_20210628_1800.txt": {
589
+ "size": 3627
590
+ },
591
+ "staticdata/WPC_sfc_fronts_lowres_20210628_1800.txt": {
592
+ "size": 2863
593
+ },
594
+ "staticdata/dec9_sounding.txt": {
595
+ "size": 10765
596
+ },
597
+ "staticdata/jan20_sounding.txt": {
598
+ "size": 6084
599
+ },
600
+ "staticdata/master.txt": {
601
+ "size": 524313
602
+ },
603
+ "staticdata/may22_sounding.txt": {
604
+ "size": 6317
605
+ },
606
+ "staticdata/may4_sounding.txt": {
607
+ "size": 2730
608
+ },
609
+ "staticdata/mesonet_sample.txt": {
610
+ "size": 10075
611
+ },
612
+ "staticdata/metar_20190701_1200.txt": {
613
+ "size": 524313
614
+ },
615
+ "staticdata/nov11_sounding.txt": {
616
+ "size": 4460
617
+ },
618
+ "staticdata/station_data.txt": {
619
+ "size": 188266
620
+ },
621
+ "staticdata/stations.txt": {
622
+ "size": 524313
623
+ },
624
+ "tests/calc/test_basic.py": {
625
+ "size": 35703
626
+ },
627
+ "tests/calc/test_calc_tools.py": {
628
+ "size": 69144
629
+ },
630
+ "tests/calc/test_cross_sections.py": {
631
+ "size": 17512
632
+ },
633
+ "tests/calc/test_indices.py": {
634
+ "size": 16737
635
+ },
636
+ "tests/calc/test_kinematics.py": {
637
+ "size": 116459
638
+ },
639
+ "tests/calc/test_thermo.py": {
640
+ "size": 143254
641
+ },
642
+ "tests/calc/test_turbulence.py": {
643
+ "size": 19114
644
+ },
645
+ "tests/interpolate/test_geometry.py": {
646
+ "size": 6054
647
+ },
648
+ "tests/interpolate/test_grid.py": {
649
+ "size": 11977
650
+ },
651
+ "tests/interpolate/test_interpolate_tools.py": {
652
+ "size": 3800
653
+ },
654
+ "tests/interpolate/test_one_dimension.py": {
655
+ "size": 7950
656
+ },
657
+ "tests/interpolate/test_points.py": {
658
+ "size": 6042
659
+ },
660
+ "tests/interpolate/test_slices.py": {
661
+ "size": 9870
662
+ },
663
+ "tests/io/test_gempak.py": {
664
+ "size": 16437
665
+ },
666
+ "tests/io/test_gini.py": {
667
+ "size": 10122
668
+ },
669
+ "tests/io/test_metar.py": {
670
+ "size": 22999
671
+ },
672
+ "tests/io/test_nexrad.py": {
673
+ "size": 11659
674
+ },
675
+ "tests/io/test_station_data.py": {
676
+ "size": 2500
677
+ },
678
+ "tests/io/test_text.py": {
679
+ "size": 3314
680
+ },
681
+ "tests/io/test_tools.py": {
682
+ "size": 674
683
+ },
684
+ "tests/plots/test_cartopy_utils.py": {
685
+ "size": 3416
686
+ },
687
+ "tests/plots/test_ctables.py": {
688
+ "size": 4814
689
+ },
690
+ "tests/plots/test_declarative.py": {
691
+ "size": 68715
692
+ },
693
+ "tests/plots/test_mapping.py": {
694
+ "size": 10794
695
+ },
696
+ "tests/plots/test_patheffects.py": {
697
+ "size": 6190
698
+ },
699
+ "tests/plots/test_plot_areas.py": {
700
+ "size": 3747
701
+ },
702
+ "tests/plots/test_plot_text.py": {
703
+ "size": 1697
704
+ },
705
+ "tests/plots/test_skewt.py": {
706
+ "size": 32476
707
+ },
708
+ "tests/plots/test_station_plot.py": {
709
+ "size": 15430
710
+ },
711
+ "tests/plots/test_util.py": {
712
+ "size": 4838
713
+ },
714
+ "tests/plots/test_wx_symbols.py": {
715
+ "size": 2405
716
+ },
717
+ "tests/remote/fixtures/test_goes_range.yaml": {
718
+ "size": 19261
719
+ },
720
+ "tests/remote/fixtures/test_goes_single.yaml": {
721
+ "size": 29835
722
+ },
723
+ "tests/remote/fixtures/test_mlwp_range.yaml": {
724
+ "size": 8002
725
+ },
726
+ "tests/remote/fixtures/test_mlwp_single.yaml": {
727
+ "size": 3860
728
+ },
729
+ "tests/remote/fixtures/test_nexrad2_range.yaml": {
730
+ "size": 45493
731
+ },
732
+ "tests/remote/fixtures/test_nexrad2_single.yaml": {
733
+ "size": 124906
734
+ },
735
+ "tests/remote/fixtures/test_nexrad3_range.yaml": {
736
+ "size": 447356
737
+ },
738
+ "tests/remote/fixtures/test_nexrad3_single.yaml": {
739
+ "size": 52451
740
+ },
741
+ "tests/remote/test_aws.py": {
742
+ "size": 5945
743
+ },
744
+ "tests/test_cbook.py": {
745
+ "size": 857
746
+ },
747
+ "tests/test_deprecation.py": {
748
+ "size": 907
749
+ },
750
+ "tests/test_packaging.py": {
751
+ "size": 621
752
+ },
753
+ "tests/test_testing.py": {
754
+ "size": 2581
755
+ },
756
+ "tests/test_xarray.py": {
757
+ "size": 60599
758
+ },
759
+ "tests/units/test_units.py": {
760
+ "size": 9322
761
+ },
762
+ "tools/README.md": {
763
+ "size": 131
764
+ },
765
+ "tools/flake8-metpy/flake8_metpy.py": {
766
+ "size": 2532
767
+ },
768
+ "tools/flake8-metpy/test_flake8_metpy.py": {
769
+ "size": 1014
770
+ },
771
+ "tools/nexrad_msgs/parse_spec.py": {
772
+ "size": 6409
773
+ },
774
+ "tutorials/README.md": {
775
+ "size": 2426
776
+ },
777
+ "tutorials/area_tutorial.py": {
778
+ "size": 3835
779
+ },
780
+ "tutorials/declarative_tutorial.py": {
781
+ "size": 21683
782
+ },
783
+ "tutorials/unit_tutorial.py": {
784
+ "size": 10756
785
+ },
786
+ "tutorials/upperair_soundings.py": {
787
+ "size": 7360
788
+ },
789
+ "tutorials/xarray_tutorial.py": {
790
+ "size": 18510
791
+ }
792
+ },
793
+ "processed_by": "zip_fallback",
794
+ "success": true
795
+ },
796
+ "structure": {
797
+ "packages": [
798
+ "source.benchmarks.benchmarks",
799
+ "source.src.metpy"
800
+ ]
801
+ },
802
+ "dependencies": {
803
+ "has_environment_yml": false,
804
+ "has_requirements_txt": false,
805
+ "pyproject": true,
806
+ "setup_cfg": true,
807
+ "setup_py": false
808
+ },
809
+ "entry_points": {
810
+ "imports": [],
811
+ "cli": [],
812
+ "modules": []
813
+ },
814
+ "llm_analysis": {
815
+ "core_modules": [
816
+ {
817
+ "package": "source.src.metpy",
818
+ "module": "calc.thermo",
819
+ "functions": [
820
+ "parcel_profile",
821
+ "lcl",
822
+ "lfc",
823
+ "el",
824
+ "cape_cin",
825
+ "mixing_ratio",
826
+ "dewpoint_from_relative_humidity",
827
+ "equivalent_potential_temperature",
828
+ "virtual_temperature"
829
+ ],
830
+ "classes": [],
831
+ "description": "Thermodynamic calculations for parcel diagnostics, moisture variables, and severe-weather indices; highest-value computational core."
832
+ },
833
+ {
834
+ "package": "source.src.metpy",
835
+ "module": "calc.kinematics",
836
+ "functions": [
837
+ "vorticity",
838
+ "divergence",
839
+ "advection",
840
+ "frontogenesis",
841
+ "absolute_vorticity",
842
+ "q_vector",
843
+ "shearing_deformation",
844
+ "stretching_deformation",
845
+ "total_deformation"
846
+ ],
847
+ "classes": [],
848
+ "description": "Grid/flow kinematics and derivative-based meteorological diagnostics."
849
+ },
850
+ {
851
+ "package": "source.src.metpy",
852
+ "module": "calc.basic",
853
+ "functions": [
854
+ "wind_speed",
855
+ "wind_direction",
856
+ "wind_components",
857
+ "potential_temperature",
858
+ "height_to_geopotential",
859
+ "geopotential_to_height"
860
+ ],
861
+ "classes": [],
862
+ "description": "Fundamental scalar/vector meteorological transformations used broadly by higher-level modules."
863
+ },
864
+ {
865
+ "package": "source.src.metpy",
866
+ "module": "calc.indices",
867
+ "functions": [
868
+ "showalter_index",
869
+ "k_index",
870
+ "total_totals_index",
871
+ "lifted_index",
872
+ "bulk_shear",
873
+ "storm_relative_helicity"
874
+ ],
875
+ "classes": [],
876
+ "description": "Forecast and severe-weather stability/kinematic indices frequently used in operational workflows."
877
+ },
878
+ {
879
+ "package": "source.src.metpy",
880
+ "module": "interpolate.grid",
881
+ "functions": [
882
+ "interpolate_to_grid"
883
+ ],
884
+ "classes": [],
885
+ "description": "Objective analysis/gridding entry point for station-to-grid interpolation workflows."
886
+ },
887
+ {
888
+ "package": "source.src.metpy",
889
+ "module": "io.metar",
890
+ "functions": [
891
+ "parse_metar_file"
892
+ ],
893
+ "classes": [],
894
+ "description": "METAR text decoding utilities for ingesting observational station reports."
895
+ },
896
+ {
897
+ "package": "source.src.metpy",
898
+ "module": "io.nexrad",
899
+ "functions": [],
900
+ "classes": [
901
+ "Level2File",
902
+ "Level3File"
903
+ ],
904
+ "description": "NEXRAD radar file readers for binary Level II/III products."
905
+ },
906
+ {
907
+ "package": "source.src.metpy",
908
+ "module": "plots.skewt",
909
+ "functions": [],
910
+ "classes": [
911
+ "SkewT",
912
+ "Hodograph"
913
+ ],
914
+ "description": "Sounding visualization primitives; useful but higher dependency footprint."
915
+ },
916
+ {
917
+ "package": "source.src.metpy",
918
+ "module": "xarray",
919
+ "functions": [
920
+ "preprocess_and_wrap",
921
+ "check_axis",
922
+ "grid_deltas_from_dataarray"
923
+ ],
924
+ "classes": [
925
+ "MetPyDataArrayAccessor",
926
+ "MetPyDatasetAccessor"
927
+ ],
928
+ "description": "xarray integration/accessor layer that enables coordinate-aware calculations and unit-aware parsing."
929
+ },
930
+ {
931
+ "package": "source.src.metpy",
932
+ "module": "units",
933
+ "functions": [
934
+ "units",
935
+ "pandas_dataframe_to_unit_arrays",
936
+ "concatenate",
937
+ "masked_array"
938
+ ],
939
+ "classes": [],
940
+ "description": "Unit handling bridge (Pint-centric) that underpins nearly all safe numeric usage in MetPy."
941
+ }
942
+ ],
943
+ "cli_commands": [
944
+ {
945
+ "name": "none_detected",
946
+ "module": "N/A",
947
+ "description": "No explicit CLI entry points detected in scanned package paths (no console_scripts or __main__-style command modules identified in source.src.metpy)."
948
+ }
949
+ ],
950
+ "import_strategy": {
951
+ "primary": "import",
952
+ "fallback": "blackbox",
953
+ "confidence": 0.93
954
+ },
955
+ "dependencies": {
956
+ "required": [
957
+ "numpy",
958
+ "scipy",
959
+ "pint",
960
+ "packaging"
961
+ ],
962
+ "optional": [
963
+ "xarray",
964
+ "pandas",
965
+ "matplotlib",
966
+ "cartopy",
967
+ "pyproj",
968
+ "netCDF4",
969
+ "pooch",
970
+ "siphon"
971
+ ]
972
+ },
973
+ "risk_assessment": {
974
+ "import_feasibility": 0.91,
975
+ "intrusiveness_risk": "low",
976
+ "complexity": "medium"
977
+ }
978
+ },
979
+ "deepwiki_analysis": {
980
+ "repo_url": "https://github.com/Unidata/MetPy",
981
+ "repo_name": "MetPy",
982
+ "error": "DeepWiki analysis failed",
983
+ "model": "gpt-5.3-codex",
984
+ "source": "llm_direct_analysis",
985
+ "success": false
986
+ },
987
+ "deepwiki_options": {
988
+ "enabled": true,
989
+ "model": "gpt-5.3-codex"
990
+ },
991
+ "risk": {
992
+ "import_feasibility": 0.91,
993
+ "intrusiveness_risk": "low",
994
+ "complexity": "medium"
995
+ }
996
+ }
MetPy/mcp_output/diff_report.md ADDED
@@ -0,0 +1,138 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # MetPy Difference Report
2
+
3
+ **Repository:** MetPy
4
+ **Project Type:** Python library
5
+ **Assessment Time:** 2026-03-12 09:35:35
6
+ **Intrusiveness:** None
7
+ **Workflow Status:** ✅ Success
8
+ **Test Status:** ❌ Failed
9
+ **File Changes:** 8 new files, 0 modified files
10
+
11
+ ---
12
+
13
+ ## 1. Project Overview
14
+
15
+ This change set introduces **new artifacts only** (no edits to existing code paths), indicating an additive update intended to extend or support basic functionality in the MetPy project.
16
+ Given that the workflow completed successfully but tests failed, the repository is operational at a pipeline level, but code quality and/or integration validity is currently blocked by test regressions.
17
+
18
+ ---
19
+
20
+ ## 2. Change Summary
21
+
22
+ | Metric | Value |
23
+ |---|---|
24
+ | New files | 8 |
25
+ | Modified files | 0 |
26
+ | Deleted files | 0 (not reported) |
27
+ | Intrusiveness | None |
28
+ | CI workflow | Success |
29
+ | Test suite | Failed |
30
+
31
+ ### Interpretation
32
+ - **Low-risk structural change** from a source-control perspective (no existing file edits).
33
+ - **Functional risk remains** due to failed tests, which may indicate:
34
+ - missing integration wiring for new files,
35
+ - incomplete implementation,
36
+ - incorrect assumptions in new logic,
37
+ - test environment/configuration drift.
38
+
39
+ ---
40
+
41
+ ## 3. Difference Analysis
42
+
43
+ Because only new files were added:
44
+ 1. **Backward compatibility risk is likely limited** at source level (no direct modifications).
45
+ 2. **Runtime risk still exists** if new files are imported or auto-discovered by package initialization.
46
+ 3. **Packaging/distribution risk** may increase if setup metadata includes new modules without required dependencies.
47
+ 4. **Quality gate failure** (tests) prevents safe promotion despite clean workflow execution.
48
+
49
+ ---
50
+
51
+ ## 4. Technical Analysis
52
+
53
+ ## 4.1 CI Signal Split
54
+ - **Workflow Success** suggests:
55
+ - lint/build steps likely passed,
56
+ - pipeline orchestration and environment provisioning are healthy.
57
+ - **Test Failure** suggests:
58
+ - logic-level or integration-level issues,
59
+ - potential mismatch between expected and actual behavior.
60
+
61
+ ## 4.2 Potential Failure Categories (for new-file-only changes)
62
+ - **Uncovered edge cases** in newly introduced functionality.
63
+ - **Import-time side effects** causing failures in unrelated tests.
64
+ - **Dependency omissions** (requirements not updated but new files rely on external packages).
65
+ - **Test expectation drift** if baseline outputs changed.
66
+ - **Discovery/registration issues** (plugins, entry points, module exports).
67
+
68
+ ## 4.3 Risk Assessment
69
+
70
+ | Area | Risk | Notes |
71
+ |---|---|---|
72
+ | Existing core behavior | Low–Medium | No modified files, but import/discovery could still affect runtime |
73
+ | New functionality correctness | Medium–High | Tests failing indicate unresolved issues |
74
+ | Release readiness | High risk (not ready) | Failing tests block reliable release |
75
+ | Operational deployment | Medium | Depends on whether new code path is active by default |
76
+
77
+ ---
78
+
79
+ ## 5. Recommendations & Improvements
80
+
81
+ ## 5.1 Immediate Actions (Blockers)
82
+ 1. **Triage failing tests by category**
83
+ - Unit vs integration vs regression.
84
+ - New feature tests vs unrelated legacy tests.
85
+ 2. **Map failures to added files**
86
+ - Trace stack traces to determine whether failures originate from the new modules.
87
+ 3. **Fix and re-run full test matrix**
88
+ - Include supported Python versions and optional dependency sets.
89
+
90
+ ## 5.2 Code Quality Enhancements
91
+ - Add/expand **unit tests per new file** (happy path + edge cases + invalid inputs).
92
+ - Verify **typing and API contracts** for public-facing additions.
93
+ - Ensure **docs and examples** align with actual behavior.
94
+ - Validate **import safety** (avoid heavy work at module import time).
95
+
96
+ ## 5.3 Release Hygiene
97
+ - Update changelog with clear “Added” entries.
98
+ - Confirm packaging metadata includes/excludes new files intentionally.
99
+ - If functionality is experimental, gate behind feature flags or internal namespace.
100
+
101
+ ---
102
+
103
+ ## 6. Deployment Information
104
+
105
+ **Current deployment recommendation:** ⛔ **Do not deploy/promote** this revision to production or release tags while test status is failed.
106
+
107
+ ### Pre-deployment Checklist
108
+ - [ ] All failing tests fixed and passing.
109
+ - [ ] New files covered by tests at acceptable threshold.
110
+ - [ ] Static checks (lint/type/security) pass.
111
+ - [ ] Packaging and import validation pass.
112
+ - [ ] Release notes/changelog updated.
113
+ - [ ] CI rerun successful on full matrix.
114
+
115
+ ---
116
+
117
+ ## 7. Future Planning
118
+
119
+ ## 7.1 Short-Term (Next 1–2 iterations)
120
+ - Stabilize failing tests and merge with green CI.
121
+ - Add regression tests tied to current failure signatures.
122
+ - Improve test diagnostics (clear assertions, deterministic fixtures).
123
+
124
+ ## 7.2 Mid-Term
125
+ - Strengthen contribution guardrails:
126
+ - require test pass before merge,
127
+ - enforce coverage delta checks for newly added files.
128
+ - Introduce targeted smoke tests for package import and basic functionality.
129
+
130
+ ## 7.3 Long-Term
131
+ - Build reliability dashboards for CI trends (pass rate, flaky tests, duration).
132
+ - Standardize module templates for new files (tests + docs + typing + examples).
133
+
134
+ ---
135
+
136
+ ## 8. Conclusion
137
+
138
+ This update is structurally additive (**8 new files, no direct edits**), which is generally low-intrusive. However, **failed tests make the change set not release-ready**. Prioritize root-cause analysis of failures, close coverage gaps for the new files, and require a fully green CI matrix before deployment.
MetPy/mcp_output/mcp_plugin/__init__.py ADDED
File without changes
MetPy/mcp_output/mcp_plugin/adapter.py ADDED
@@ -0,0 +1,293 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import traceback
4
+ import importlib
5
+ from typing import Any, Dict, List, Optional
6
+
7
+ source_path = os.path.join(
8
+ os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
9
+ "source",
10
+ )
11
+ sys.path.insert(0, source_path)
12
+
13
+
14
+ class Adapter:
15
+ """
16
+ MCP Import-Mode Adapter for MetPy repository.
17
+
18
+ This adapter prefers direct import/use of repository modules and provides a
19
+ graceful fallback "blackbox" mode when imports fail.
20
+ """
21
+
22
+ def __init__(self) -> None:
23
+ self.mode = "import"
24
+ self.repo_url = "https://github.com/Unidata/MetPy"
25
+ self.package_root = "src.metpy"
26
+ self._modules: Dict[str, Any] = {}
27
+ self._import_errors: List[str] = []
28
+ self._load_modules()
29
+
30
+ # -------------------------------------------------------------------------
31
+ # Internal helpers
32
+ # -------------------------------------------------------------------------
33
+ def _ok(self, data: Any = None, message: str = "success", **extra: Any) -> Dict[str, Any]:
34
+ resp = {"status": "success", "mode": self.mode, "message": message, "data": data}
35
+ if extra:
36
+ resp.update(extra)
37
+ return resp
38
+
39
+ def _err(self, message: str, error: Optional[Exception] = None, **extra: Any) -> Dict[str, Any]:
40
+ resp = {"status": "error", "mode": self.mode, "message": message}
41
+ if error is not None:
42
+ resp["error"] = str(error)
43
+ resp["traceback"] = traceback.format_exc(limit=2)
44
+ if extra:
45
+ resp.update(extra)
46
+ return resp
47
+
48
+ def _fallback(self, action: str, guidance: str) -> Dict[str, Any]:
49
+ return {
50
+ "status": "fallback",
51
+ "mode": self.mode,
52
+ "action": action,
53
+ "message": "Import mode unavailable. Switched to fallback behavior.",
54
+ "guidance": guidance,
55
+ }
56
+
57
+ def _load_modules(self) -> None:
58
+ targets = [
59
+ "src.metpy",
60
+ "src.metpy.calc",
61
+ "src.metpy.interpolate",
62
+ "src.metpy.io",
63
+ "src.metpy.plots",
64
+ "src.metpy.remote",
65
+ "src.metpy.units",
66
+ "src.metpy.constants",
67
+ "src.metpy.xarray",
68
+ ]
69
+ for name in targets:
70
+ try:
71
+ self._modules[name] = importlib.import_module(name)
72
+ except Exception as e:
73
+ self._import_errors.append(f"{name}: {e}")
74
+
75
+ if self._import_errors:
76
+ self.mode = "blackbox"
77
+
78
+ def health(self) -> Dict[str, Any]:
79
+ """
80
+ Return adapter health and import diagnostics.
81
+ """
82
+ return self._ok(
83
+ data={
84
+ "repo_url": self.repo_url,
85
+ "package_root": self.package_root,
86
+ "loaded_modules": sorted(self._modules.keys()),
87
+ "import_errors": self._import_errors,
88
+ },
89
+ message="adapter initialized",
90
+ )
91
+
92
+ # -------------------------------------------------------------------------
93
+ # Module management
94
+ # -------------------------------------------------------------------------
95
+ def list_modules(self) -> Dict[str, Any]:
96
+ """
97
+ List managed MetPy modules loaded by this adapter.
98
+ """
99
+ return self._ok(data=sorted(self._modules.keys()))
100
+
101
+ def get_module(self, module_name: str) -> Dict[str, Any]:
102
+ """
103
+ Get a loaded module by full path (e.g., 'src.metpy.calc').
104
+
105
+ Parameters:
106
+ module_name: Full module import path.
107
+ """
108
+ try:
109
+ mod = self._modules.get(module_name)
110
+ if mod is None:
111
+ return self._err(
112
+ f"Module '{module_name}' is not loaded.",
113
+ None,
114
+ guidance="Call reload_module or verify source path and dependencies.",
115
+ )
116
+ return self._ok(data={"module": module_name, "repr": repr(mod)})
117
+ except Exception as e:
118
+ return self._err("Failed to access module.", e)
119
+
120
+ def reload_module(self, module_name: str) -> Dict[str, Any]:
121
+ """
122
+ Reload a specific module by full path.
123
+ """
124
+ try:
125
+ mod = importlib.import_module(module_name)
126
+ mod = importlib.reload(mod)
127
+ self._modules[module_name] = mod
128
+ if self.mode == "blackbox":
129
+ self.mode = "import"
130
+ return self._ok(data={"module": module_name}, message="module reloaded")
131
+ except Exception as e:
132
+ self.mode = "blackbox"
133
+ return self._err(
134
+ f"Failed to reload module '{module_name}'.",
135
+ e,
136
+ guidance="Ensure optional dependencies are installed and module path is correct.",
137
+ )
138
+
139
+ # -------------------------------------------------------------------------
140
+ # Generic invocation utilities (comprehensive fallback for unknown symbols)
141
+ # -------------------------------------------------------------------------
142
+ def call_function(self, module_name: str, function_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
143
+ """
144
+ Call any function from a managed module.
145
+
146
+ Parameters:
147
+ module_name: Full module path, e.g., 'src.metpy.calc'
148
+ function_name: Function attribute name
149
+ *args/**kwargs: Forwarded to target function
150
+ """
151
+ if self.mode != "import":
152
+ return self._fallback(
153
+ action="call_function",
154
+ guidance="Restore import mode by installing required dependencies: numpy, scipy, pint, packaging.",
155
+ )
156
+ try:
157
+ mod = self._modules.get(module_name) or importlib.import_module(module_name)
158
+ fn = getattr(mod, function_name, None)
159
+ if fn is None or not callable(fn):
160
+ return self._err(
161
+ f"Function '{function_name}' not found in module '{module_name}'.",
162
+ None,
163
+ guidance="Inspect available names via inspect_module.",
164
+ )
165
+ result = fn(*args, **kwargs)
166
+ return self._ok(data=result, message=f"{module_name}.{function_name} executed")
167
+ except Exception as e:
168
+ return self._err(
169
+ f"Failed to execute function '{function_name}' from '{module_name}'.",
170
+ e,
171
+ guidance="Verify parameter units/types expected by MetPy functions.",
172
+ )
173
+
174
+ def create_instance(self, module_name: str, class_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
175
+ """
176
+ Instantiate any class from a managed module.
177
+
178
+ Parameters:
179
+ module_name: Full module path, e.g., 'src.metpy.plots'
180
+ class_name: Class attribute name
181
+ *args/**kwargs: Forwarded to class constructor
182
+ """
183
+ if self.mode != "import":
184
+ return self._fallback(
185
+ action="create_instance",
186
+ guidance="Install optional plotting/IO dependencies if constructing advanced classes.",
187
+ )
188
+ try:
189
+ mod = self._modules.get(module_name) or importlib.import_module(module_name)
190
+ cls = getattr(mod, class_name, None)
191
+ if cls is None:
192
+ return self._err(
193
+ f"Class '{class_name}' not found in module '{module_name}'.",
194
+ None,
195
+ guidance="Inspect available names via inspect_module.",
196
+ )
197
+ instance = cls(*args, **kwargs)
198
+ return self._ok(data={"class": class_name, "instance_repr": repr(instance)})
199
+ except Exception as e:
200
+ return self._err(
201
+ f"Failed to instantiate class '{class_name}' from '{module_name}'.",
202
+ e,
203
+ guidance="Check constructor arguments and optional dependency availability.",
204
+ )
205
+
206
+ def inspect_module(self, module_name: str, include_private: bool = False) -> Dict[str, Any]:
207
+ """
208
+ Inspect attributes of a module and group into classes/functions/others.
209
+ """
210
+ try:
211
+ mod = self._modules.get(module_name) or importlib.import_module(module_name)
212
+ names = dir(mod)
213
+ if not include_private:
214
+ names = [n for n in names if not n.startswith("_")]
215
+ classes, functions, others = [], [], []
216
+ for n in names:
217
+ obj = getattr(mod, n, None)
218
+ if obj is None:
219
+ continue
220
+ if isinstance(obj, type):
221
+ classes.append(n)
222
+ elif callable(obj):
223
+ functions.append(n)
224
+ else:
225
+ others.append(n)
226
+ return self._ok(
227
+ data={
228
+ "module": module_name,
229
+ "classes": sorted(classes),
230
+ "functions": sorted(functions),
231
+ "others": sorted(others),
232
+ }
233
+ )
234
+ except Exception as e:
235
+ return self._err("Failed to inspect module.", e)
236
+
237
+ # -------------------------------------------------------------------------
238
+ # High-level convenience wrappers for detected core modules
239
+ # -------------------------------------------------------------------------
240
+ def calc_call(self, function_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
241
+ """Call a function from src.metpy.calc."""
242
+ return self.call_function("src.metpy.calc", function_name, *args, **kwargs)
243
+
244
+ def interpolate_call(self, function_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
245
+ """Call a function from src.metpy.interpolate."""
246
+ return self.call_function("src.metpy.interpolate", function_name, *args, **kwargs)
247
+
248
+ def io_call(self, function_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
249
+ """Call a function from src.metpy.io."""
250
+ return self.call_function("src.metpy.io", function_name, *args, **kwargs)
251
+
252
+ def plots_call(self, function_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
253
+ """Call a function from src.metpy.plots."""
254
+ return self.call_function("src.metpy.plots", function_name, *args, **kwargs)
255
+
256
+ def remote_call(self, function_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
257
+ """Call a function from src.metpy.remote."""
258
+ return self.call_function("src.metpy.remote", function_name, *args, **kwargs)
259
+
260
+ def units_call(self, function_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
261
+ """Call a function from src.metpy.units."""
262
+ return self.call_function("src.metpy.units", function_name, *args, **kwargs)
263
+
264
+ def xarray_call(self, function_name: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
265
+ """Call a function from src.metpy.xarray."""
266
+ return self.call_function("src.metpy.xarray", function_name, *args, **kwargs)
267
+
268
+ # -------------------------------------------------------------------------
269
+ # Dependency and guidance utilities
270
+ # -------------------------------------------------------------------------
271
+ def dependency_status(self) -> Dict[str, Any]:
272
+ """
273
+ Check required and optional dependencies from analysis guidance.
274
+ """
275
+ required = ["numpy", "scipy", "pint", "packaging"]
276
+ optional = ["xarray", "pandas", "matplotlib", "cartopy", "pyproj", "netCDF4", "pooch", "siphon"]
277
+
278
+ def check(pkg: str) -> bool:
279
+ try:
280
+ importlib.import_module(pkg)
281
+ return True
282
+ except Exception:
283
+ return False
284
+
285
+ req_status = {p: check(p) for p in required}
286
+ opt_status = {p: check(p) for p in optional}
287
+ return self._ok(
288
+ data={
289
+ "required": req_status,
290
+ "optional": opt_status,
291
+ "all_required_available": all(req_status.values()),
292
+ }
293
+ )
MetPy/mcp_output/mcp_plugin/main.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ MCP Service Auto-Wrapper - Auto-generated
3
+ """
4
+ from mcp_service import create_app
5
+
6
def main():
    """Main entry point: build and return the FastMCP application."""
    return create_app()


if __name__ == "__main__":
    app = main()
    app.run()
MetPy/mcp_output/mcp_plugin/mcp_service.py ADDED
@@ -0,0 +1,314 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ from typing import Any, Dict, List, Optional
4
+
5
# Prepend the repository's "source" directory (three levels above this file)
# so the bundled MetPy package is importable ahead of any installed copy.
source_path = os.path.join(
    os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    ),
    "source",
)
if source_path not in sys.path:
    sys.path.insert(0, source_path)
11
+
12
+ from fastmcp import FastMCP
13
+ from metpy.calc import (
14
+ advection,
15
+ cape_cin,
16
+ dewpoint_from_relative_humidity,
17
+ equivalent_potential_temperature,
18
+ lcl,
19
+ potential_temperature,
20
+ relative_humidity_from_dewpoint,
21
+ vorticity,
22
+ wind_speed,
23
+ )
24
+ from metpy.constants import g
25
+ from metpy.io import parse_metar_file
26
+ from metpy.units import units
27
+
28
# Single FastMCP server instance; every @mcp.tool below registers on it.
mcp = FastMCP("metpy_service")
29
+
30
+
31
+ def _ok(result: Any) -> Dict[str, Any]:
32
+ return {"success": True, "result": result, "error": None}
33
+
34
+
35
+ def _err(message: str) -> Dict[str, Any]:
36
+ return {"success": False, "result": None, "error": message}
37
+
38
+
39
@mcp.tool(name="calc_wind_speed", description="Compute wind speed from u and v wind components.")
def calc_wind_speed(u_ms: float, v_ms: float) -> Dict[str, Any]:
    """
    Calculate scalar wind speed.

    Parameters:
        u_ms: Zonal wind component in meters per second.
        v_ms: Meridional wind component in meters per second.

    Returns:
        Dictionary with success/result/error where result is wind speed in m/s.
    """
    try:
        u = u_ms * units("m/s")
        v = v_ms * units("m/s")
        speed = wind_speed(u, v)
        return _ok(float(speed.to("m/s").magnitude))
    except Exception as exc:
        return _err(str(exc))
56
+
57
+
58
@mcp.tool(name="calc_potential_temperature", description="Compute potential temperature from pressure and temperature.")
def calc_potential_temperature(pressure_hpa: float, temperature_c: float) -> Dict[str, Any]:
    """
    Calculate potential temperature.

    Parameters:
        pressure_hpa: Air pressure in hPa.
        temperature_c: Air temperature in degrees Celsius.

    Returns:
        Dictionary with success/result/error where result is theta in Kelvin.
    """
    try:
        pressure = pressure_hpa * units.hPa
        temperature = temperature_c * units.degC
        theta = potential_temperature(pressure, temperature)
        return _ok(float(theta.to("kelvin").magnitude))
    except Exception as exc:
        return _err(str(exc))
75
+
76
+
77
@mcp.tool(name="calc_lcl", description="Compute lifting condensation level pressure and temperature.")
def calc_lcl(
    pressure_hpa: float, temperature_c: float, dewpoint_c: float
) -> Dict[str, Any]:
    """
    Calculate LCL pressure and temperature.

    Parameters:
        pressure_hpa: Parcel pressure in hPa.
        temperature_c: Parcel temperature in degrees Celsius.
        dewpoint_c: Parcel dewpoint in degrees Celsius.

    Returns:
        Dictionary with success/result/error where result contains lcl_pressure_hpa and lcl_temperature_c.
    """
    try:
        parcel = (
            pressure_hpa * units.hPa,
            temperature_c * units.degC,
            dewpoint_c * units.degC,
        )
        lcl_pressure, lcl_temperature = lcl(*parcel)
        payload = {
            "lcl_pressure_hpa": float(lcl_pressure.to("hPa").magnitude),
            "lcl_temperature_c": float(lcl_temperature.to("degC").magnitude),
        }
        return _ok(payload)
    except Exception as exc:
        return _err(str(exc))
106
+
107
+
108
@mcp.tool(name="calc_rh_from_dewpoint", description="Compute relative humidity from temperature and dewpoint.")
def calc_rh_from_dewpoint(temperature_c: float, dewpoint_c: float) -> Dict[str, Any]:
    """
    Calculate relative humidity.

    Parameters:
        temperature_c: Air temperature in degrees Celsius.
        dewpoint_c: Dewpoint temperature in degrees Celsius.

    Returns:
        Dictionary with success/result/error where result is relative humidity as a fraction [0, 1].
    """
    try:
        temperature = temperature_c * units.degC
        dewpoint = dewpoint_c * units.degC
        rh = relative_humidity_from_dewpoint(temperature, dewpoint)
        return _ok(float(rh.to("dimensionless").magnitude))
    except Exception as exc:
        return _err(str(exc))
127
+
128
+
129
@mcp.tool(name="calc_dewpoint_from_rh", description="Compute dewpoint from air temperature and relative humidity.")
def calc_dewpoint_from_rh(temperature_c: float, relative_humidity: float) -> Dict[str, Any]:
    """
    Calculate dewpoint from temperature and RH.

    Parameters:
        temperature_c: Air temperature in degrees Celsius.
        relative_humidity: Relative humidity as fraction [0, 1].

    Returns:
        Dictionary with success/result/error where result is dewpoint in degrees Celsius.
    """
    try:
        temperature = temperature_c * units.degC
        rh = relative_humidity * units.dimensionless
        dewpoint = dewpoint_from_relative_humidity(temperature, rh)
        return _ok(float(dewpoint.to("degC").magnitude))
    except Exception as exc:
        return _err(str(exc))
148
+
149
+
150
@mcp.tool(name="calc_equivalent_potential_temperature", description="Compute equivalent potential temperature.")
def calc_equivalent_potential_temperature(
    pressure_hpa: float, temperature_c: float, dewpoint_c: float
) -> Dict[str, Any]:
    """
    Calculate equivalent potential temperature.

    Parameters:
        pressure_hpa: Air pressure in hPa.
        temperature_c: Air temperature in degrees Celsius.
        dewpoint_c: Dewpoint temperature in degrees Celsius.

    Returns:
        Dictionary with success/result/error where result is theta-e in Kelvin.
    """
    try:
        pressure = pressure_hpa * units.hPa
        temperature = temperature_c * units.degC
        dewpoint = dewpoint_c * units.degC
        theta_e = equivalent_potential_temperature(pressure, temperature, dewpoint)
        return _ok(float(theta_e.to("kelvin").magnitude))
    except Exception as exc:
        return _err(str(exc))
172
+
173
+
174
@mcp.tool(name="calc_cape_cin", description="Compute CAPE and CIN from vertical profiles.")
def calc_cape_cin(
    pressure_hpa: List[float], temperature_c: List[float], dewpoint_c: List[float]
) -> Dict[str, Any]:
    """
    Calculate CAPE and CIN for a sounding profile.

    Parameters:
        pressure_hpa: Pressure profile in hPa.
        temperature_c: Temperature profile in degrees Celsius.
        dewpoint_c: Dewpoint profile in degrees Celsius.

    Returns:
        Dictionary with success/result/error where result contains cape_j_per_kg and cin_j_per_kg.
    """
    try:
        profile = (
            pressure_hpa * units.hPa,
            temperature_c * units.degC,
            dewpoint_c * units.degC,
        )
        cape, cin = cape_cin(*profile)
        payload = {
            "cape_j_per_kg": float(cape.to("J/kg").magnitude),
            "cin_j_per_kg": float(cin.to("J/kg").magnitude),
        }
        return _ok(payload)
    except Exception as exc:
        return _err(str(exc))
202
+
203
+
204
@mcp.tool(name="calc_vertical_vorticity", description="Compute vertical vorticity from wind and grid spacing.")
def calc_vertical_vorticity(
    u_ms: List[List[float]],
    v_ms: List[List[float]],
    dx_m: float,
    dy_m: float,
) -> Dict[str, Any]:
    """
    Calculate vertical vorticity on a regular grid.

    Parameters:
        u_ms: 2D zonal wind field in m/s.
        v_ms: 2D meridional wind field in m/s.
        dx_m: Grid spacing in x direction (meters).
        dy_m: Grid spacing in y direction (meters).

    Returns:
        Dictionary with success/result/error where result is a 2D list of vorticity in 1/s.
    """
    try:
        # numpy is imported inside the try so a missing install is reported
        # through the standard error envelope rather than crashing the tool.
        import numpy as np

        wind_u = np.array(u_ms) * units("m/s")
        wind_v = np.array(v_ms) * units("m/s")
        zeta = vorticity(wind_u, wind_v, dx=dx_m * units.meter, dy=dy_m * units.meter)
        return _ok(zeta.to("1/s").magnitude.tolist())
    except Exception as exc:
        return _err(str(exc))
232
+
233
+
234
@mcp.tool(name="calc_advection_scalar", description="Compute scalar advection using wind and scalar field gradients.")
def calc_advection_scalar(
    scalar_field: List[List[float]],
    u_ms: List[List[float]],
    v_ms: List[List[float]],
    dx_m: float,
    dy_m: float,
) -> Dict[str, Any]:
    """
    Calculate horizontal advection of a scalar field.

    Parameters:
        scalar_field: 2D scalar field (unitless values).
        u_ms: 2D zonal wind field in m/s.
        v_ms: 2D meridional wind field in m/s.
        dx_m: Grid spacing in x direction in meters.
        dy_m: Grid spacing in y direction in meters.

    Returns:
        Dictionary with success/result/error where result is 2D advection field in scalar units per second.
    """
    try:
        # numpy is imported inside the try so a missing install is reported
        # through the standard error envelope rather than crashing the tool.
        import numpy as np

        scalar = np.array(scalar_field) * units.dimensionless
        wind_u = np.array(u_ms) * units("m/s")
        wind_v = np.array(v_ms) * units("m/s")
        adv = advection(
            scalar, u=wind_u, v=wind_v, dx=dx_m * units.meter, dy=dy_m * units.meter
        )
        return _ok(adv.to("1/s").magnitude.tolist())
    except Exception as exc:
        return _err(str(exc))
265
+
266
+
267
@mcp.tool(name="parse_metar_text", description="Parse METAR text and return selected station records.")
def parse_metar_text(
    metar_text: str,
    year: Optional[int] = None,
    month: Optional[int] = None,
    max_records: Optional[int] = 100,
) -> Dict[str, Any]:
    """
    Parse raw METAR text content.

    Parameters:
        metar_text: Raw METAR bulletin text with one report per line.
        year: Optional year for parsing date context.
        month: Optional month for parsing date context.
        max_records: Maximum number of records to return (default 100, the
            previous hard-coded truncation limit). Pass None to return all
            parsed records.

    Returns:
        Dictionary with success/result/error where result is a list of parsed records.
    """
    try:
        import io

        kwargs: Dict[str, Any] = {}
        if year is not None:
            kwargs["year"] = year
        if month is not None:
            kwargs["month"] = month

        df = parse_metar_file(io.StringIO(metar_text), **kwargs)
        # Truncate only when a limit was requested, to keep payloads bounded.
        if max_records is not None:
            df = df.head(max_records)
        records = df.to_dict(orient="records")
        return _ok(records)
    except Exception as exc:
        return _err(str(exc))
294
+
295
+
296
@mcp.tool(name="get_gravity_constant", description="Return standard gravity constant from MetPy constants.")
def get_gravity_constant() -> Dict[str, Any]:
    """
    Fetch standard gravitational acceleration.

    Parameters:
        None.

    Returns:
        Dictionary with success/result/error where result is gravity in m/s^2.
    """
    try:
        gravity = g.to("m/s^2")
        return _ok(float(gravity.magnitude))
    except Exception as exc:
        return _err(str(exc))
311
+
312
+
313
def create_app() -> FastMCP:
    """Return the module-level FastMCP application with all tools registered."""
    return mcp
MetPy/mcp_output/requirements.txt ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ fastmcp
2
+ fastapi
3
+ uvicorn[standard]
4
+ pydantic>=2.0.0
5
+ matplotlib>=3.7.0
6
+ numpy>=1.25.0
7
+ pandas>=2.1.0
8
+ pint>=0.22
9
+ pooch>=1.2.0
10
+ pyproj>=3.4.0
11
+ scipy>=1.10.0
12
+ traitlets>=5.1.0
13
+ xarray>=2022.6.0
14
+ packaging
MetPy/mcp_output/start_mcp.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ """
3
+ MCP Service Startup Entry
4
+ """
5
+ import sys
6
+ import os
7
+
8
+ project_root = os.path.dirname(os.path.abspath(__file__))
9
+ mcp_plugin_dir = os.path.join(project_root, "mcp_plugin")
10
+ if mcp_plugin_dir not in sys.path:
11
+ sys.path.insert(0, mcp_plugin_dir)
12
+
13
+ from mcp_service import create_app
14
+
15
def main():
    """Start FastMCP service"""
    app = create_app()

    # Port comes from MCP_PORT (default 8000); it is only used for the HTTP
    # transport — STDIO mode ignores it.
    port = int(os.environ.get("MCP_PORT", "8000"))

    # MCP_TRANSPORT selects the transport; any value other than "http"
    # falls back to the default STDIO mode.
    if os.environ.get("MCP_TRANSPORT", "stdio") == "http":
        app.run(transport="http", host="0.0.0.0", port=port)
    else:
        app.run()


if __name__ == "__main__":
    main()
MetPy/mcp_output/workflow_summary.json ADDED
@@ -0,0 +1,225 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "repository": {
3
+ "name": "MetPy",
4
+ "url": "https://github.com/Unidata/MetPy",
5
+ "local_path": "/Users/ghh/Documents/Code/Code2MCP-private/workspace/MetPy",
6
+ "description": "Python library",
7
+ "features": "Basic functionality",
8
+ "tech_stack": "Python",
9
+ "stars": 0,
10
+ "forks": 0,
11
+ "language": "Python",
12
+ "last_updated": "",
13
+ "complexity": "medium",
14
+ "intrusiveness_risk": "low"
15
+ },
16
+ "execution": {
17
+ "start_time": 1773278863.8095639,
18
+ "end_time": 1773279187.500253,
19
+ "duration": 323.69068908691406,
20
+ "status": "success",
21
+ "workflow_status": "success",
22
+ "nodes_executed": [
23
+ "download",
24
+ "analysis",
25
+ "env",
26
+ "generate",
27
+ "run",
28
+ "review",
29
+ "finalize"
30
+ ],
31
+ "total_files_processed": 2,
32
+ "environment_type": "unknown",
33
+ "llm_calls": 0,
34
+ "deepwiki_calls": 0
35
+ },
36
+ "tests": {
37
+ "original_project": {
38
+ "passed": false,
39
+ "details": {},
40
+ "test_coverage": "100%",
41
+ "execution_time": 0,
42
+ "test_files": []
43
+ },
44
+ "mcp_plugin": {
45
+ "passed": true,
46
+ "details": {},
47
+ "service_health": "healthy",
48
+ "startup_time": 0,
49
+ "transport_mode": "stdio",
50
+ "fastmcp_version": "unknown",
51
+ "mcp_version": "unknown"
52
+ }
53
+ },
54
+ "analysis": {
55
+ "structure": {
56
+ "packages": [
57
+ "source.benchmarks.benchmarks",
58
+ "source.src.metpy"
59
+ ]
60
+ },
61
+ "dependencies": {
62
+ "has_environment_yml": false,
63
+ "has_requirements_txt": false,
64
+ "pyproject": true,
65
+ "setup_cfg": true,
66
+ "setup_py": false
67
+ },
68
+ "entry_points": {
69
+ "imports": [],
70
+ "cli": [],
71
+ "modules": []
72
+ },
73
+ "risk_assessment": {
74
+ "import_feasibility": 0.91,
75
+ "intrusiveness_risk": "low",
76
+ "complexity": "medium"
77
+ },
78
+ "deepwiki_analysis": {
79
+ "repo_url": "https://github.com/Unidata/MetPy",
80
+ "repo_name": "MetPy",
81
+ "error": "DeepWiki analysis failed",
82
+ "model": "gpt-5.3-codex",
83
+ "source": "llm_direct_analysis",
84
+ "success": false
85
+ },
86
+ "code_complexity": {
87
+ "cyclomatic_complexity": "medium",
88
+ "cognitive_complexity": "medium",
89
+ "maintainability_index": 75
90
+ },
91
+ "security_analysis": {
92
+ "vulnerabilities_found": 0,
93
+ "security_score": 85,
94
+ "recommendations": []
95
+ }
96
+ },
97
+ "plugin_generation": {
98
+ "files_created": [
99
+ "mcp_output/start_mcp.py",
100
+ "mcp_output/mcp_plugin/__init__.py",
101
+ "mcp_output/mcp_plugin/mcp_service.py",
102
+ "mcp_output/mcp_plugin/adapter.py",
103
+ "mcp_output/mcp_plugin/main.py",
104
+ "mcp_output/requirements.txt",
105
+ "mcp_output/README_MCP.md"
106
+ ],
107
+ "main_entry": "start_mcp.py",
108
+ "requirements": [
109
+ "fastmcp>=0.1.0",
110
+ "pydantic>=2.0.0"
111
+ ],
112
+ "readme_path": "/Users/ghh/Documents/Code/Code2MCP-private/workspace/MetPy/mcp_output/README_MCP.md",
113
+ "adapter_mode": "import",
114
+ "total_lines_of_code": 0,
115
+ "generated_files_size": 0,
116
+ "tool_endpoints": 0,
117
+ "supported_features": [
118
+ "Basic functionality"
119
+ ],
120
+ "generated_tools": [
121
+ "Basic tools",
122
+ "Health check tools",
123
+ "Version info tools"
124
+ ]
125
+ },
126
+ "code_review": {},
127
+ "errors": [],
128
+ "warnings": [],
129
+ "recommendations": [
130
+ "Add a minimal MCP smoke test suite (server startup + 2–3 representative endpoints like `cape_cin`",
131
+ "`vorticity`",
132
+ "`parse_metar_file`) into CI",
133
+ "Define and enforce endpoint input/output schemas with Pydantic models (units",
134
+ "dimensionality",
135
+ "and array shape validation) to reduce runtime ambiguity",
136
+ "Introduce tiered endpoint sets (core-safe vs heavy-optional) so `plots`/`cartopy`/radar features are conditionally enabled based on installed extras",
137
+ "Add structured error mapping in the adapter layer (ValueError/UnitError/File I/O → stable MCP error codes/messages) for client reliability",
138
+ "Create regression tests for unit-handling edge cases (Pint quantities",
139
+ "masked arrays",
140
+ "xarray coordinates) across exposed endpoints",
141
+ "Pin and document compatibility matrix for Python + numpy/scipy/pint/xarray in `mcp_output/README_MCP.md`",
142
+ "Add benchmark gates for high-cost functions (`cape_cin`",
143
+ "`interpolate_to_grid`",
144
+ "`q_vector`) and track drift with existing ASV patterns",
145
+ "Improve import resilience by lazy-loading heavy modules (`plots`",
146
+ "`io.nexrad`) and emitting clear dependency-missing hints",
147
+ "Add contract tests to verify generated endpoint names map correctly to callable objects (especially class wrappers like `level2file`",
148
+ "`skewt`",
149
+ "accessors)",
150
+ "Provide example client payloads/responses for 5–10 common workflows in MCP README to reduce integration errors",
151
+ "Add observability hooks (request timing",
152
+ "endpoint success/failure counts",
153
+ "exception taxonomy) for MCP runtime diagnostics",
154
+ "Enforce static checks on plugin code (ruff/mypy/pyright) and align formatting/linting with repository standards",
155
+ "Add security hardening for file-based endpoints (`parse_metar_file`",
156
+ "radar readers): path allowlists",
157
+ "size limits",
158
+ "and timeout controls",
159
+ "Create golden-data tests using existing `staticdata/` and `tests/remote/fixtures/` to validate deterministic MCP outputs",
160
+ "Add release automation for the MCP artifact (versioning tied to MetPy tag/commit + changelog of exposed endpoints)",
161
+ "Document deprecation policy for endpoint names/arguments to prevent breaking downstream MCP clients"
162
+ ],
163
+ "performance_metrics": {
164
+ "memory_usage_mb": 0,
165
+ "cpu_usage_percent": 0,
166
+ "response_time_ms": 0,
167
+ "throughput_requests_per_second": 0
168
+ },
169
+ "deployment_info": {
170
+ "supported_platforms": [
171
+ "Linux",
172
+ "Windows",
173
+ "macOS"
174
+ ],
175
+ "python_versions": [
176
+ "3.8",
177
+ "3.9",
178
+ "3.10",
179
+ "3.11",
180
+ "3.12"
181
+ ],
182
+ "deployment_methods": [
183
+ "Docker",
184
+ "pip",
185
+ "conda"
186
+ ],
187
+ "monitoring_support": true,
188
+ "logging_configuration": "structured"
189
+ },
190
+ "execution_analysis": {
191
+ "success_factors": [
192
+ "Workflow completed end-to-end with status=success across all planned nodes (download, analysis, env, generate, run, review, finalize).",
193
+ "Import-based adapter strategy was feasible (import_feasibility ~0.91) and aligned with low intrusiveness risk.",
194
+ "Generated MCP plugin started healthy over stdio and passed plugin-level validation.",
195
+ "Repository structure and module discovery were strong enough to expose a broad endpoint set from high-value MetPy modules.",
196
+ "No runtime errors or warnings were reported by the orchestration pipeline."
197
+ ],
198
+ "failure_reasons": [
199
+ "Original project tests were not executed successfully (passed=false) despite reported 100% coverage metadata, indicating test execution/collection inconsistency.",
200
+ "DeepWiki analysis failed, reducing external architectural/context enrichment.",
201
+ "Telemetry quality is weak (0 LOC, 0 file size, 0 performance metrics) and prevents evidence-based quality/performance validation.",
202
+ "Generated tool_endpoints reported as 0 in one section, while service endpoints list is non-empty, indicating reporting/schema inconsistency."
203
+ ],
204
+ "overall_assessment": "good",
205
+ "node_performance": {
206
+ "download_time": "Completed successfully; repo imported via zip fallback (262 files). No per-node timing available, so exact duration cannot be isolated.",
207
+ "analysis_time": "Successful static/LLM-assisted analysis with medium complexity and maintainability index 75; DeepWiki substep failed.",
208
+ "generation_time": "Generation completed with 7 files and import adapter mode; endpoint exposure appears extensive, but generation telemetry is inconsistent.",
209
+ "test_time": "Plugin health checks passed; original project test status failed/not run meaningfully. Test evidence is partial."
210
+ },
211
+ "resource_usage": {
212
+ "memory_efficiency": "Unknown in practice; reported memory_usage_mb=0 indicates missing instrumentation rather than zero usage.",
213
+ "cpu_efficiency": "Unknown in practice; cpu_usage_percent=0 suggests metrics were not captured.",
214
+ "disk_usage": "Generated artifact footprint reported as 0, likely telemetry defect; actual disk usage is non-zero given created files."
215
+ }
216
+ },
217
+ "technical_quality": {
218
+ "code_quality_score": 74,
219
+ "architecture_score": 80,
220
+ "performance_score": 62,
221
+ "maintainability_score": 72,
222
+ "security_score": 85,
223
+ "scalability_score": 70
224
+ }
225
+ }
MetPy/source/.DS_Store ADDED
Binary file (6.15 kB). View file
 
MetPy/source/.codecov.yml ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ coverage:
2
+ status:
3
+ patch:
4
+ default:
5
+ target: '80'
6
+ project:
7
+ library:
8
+ target: auto
9
+ threshold: 0.1%
10
+ paths:
11
+ - "src/metpy/.*"
12
+
13
+ tests:
14
+ target: 100%
15
+ paths:
16
+ - "tests/.*"
17
+ - "src/metpy/testing.py"
18
+
19
+ notify:
20
+ gitter:
21
+ default:
22
+ url: "https://webhooks.gitter.im/e/301b8fd7792e1a48b034"
23
+
24
+ comment: off
MetPy/source/.codespellexclude ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ r'^(z|lv_|bottom_top|sigma|h(ei)?ght|altitude|depth|isobaric|pres|isotherm)'
2
+ | ``(thta, u, v, dx, dy, dim_order='yx')`` |
3
+ Changed signature from ``(thta, u, v, dx, dy, dim_order='yx')``
4
+ dpres = gempak.PRES.values
5
+ pres = 1
6
+ thta = 2
7
+ lambda grid: grid if grid.PARM in parameter else False,
8
+ col_head.SELV,
9
+ 'SELV': col_head.SELV,
10
+ col_head.SELV,
11
+ row_head.SELV,
12
+ 'SELV': row_head.SELV,
13
+ 'SELV': col_head.SELV,
14
+ # GFS, NAM, RAP, or other gridded dataset (e.g., NARR).
15
+ # This attribute can be set to False if the vector components are grid relative (e.g., for NAM
16
+ components that are earth-relative. The primary exception is NAM output with wind
17
+ col_head.SELV,
18
+ row_head.SELV,
19
+ by the archive (currently FOUR, PANG, GRAP, AURO), or the known names (
MetPy/source/.codespellignore ADDED
@@ -0,0 +1 @@
 
 
1
+ trough
MetPy/source/.coveragerc ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ [paths]
2
+ source =
3
+ src/
4
+ /*/site-packages
5
+
6
+ [run]
7
+ source = tests
8
+ source_pkgs = metpy
9
+ omit =
10
+ src/metpy/io/_nexrad_msgs/parse_spec.py
MetPy/source/.devcontainer/Dockerfile ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ FROM mcr.microsoft.com/vscode/devcontainers/miniconda:latest
2
+
3
+ COPY ci/*.txt ci-dev/*.txt /tmp/conda-tmp/
4
+ RUN sed -i -e "s/scipy==.*/scipy==1.5.3/" /tmp/conda-tmp/requirements.txt
5
+ RUN /opt/conda/bin/conda config --add channels conda-forge
6
+ RUN /opt/conda/bin/conda config --set channel_priority strict
7
+ RUN /opt/conda/bin/conda install --quiet --yes --file /tmp/conda-tmp/test_requirements.txt --file /tmp/conda-tmp/extra_requirements.txt --file /tmp/conda-tmp/requirements.txt --file /tmp/conda-tmp/linting_requirements.txt --file /tmp/conda-tmp/doc_requirements.txt
MetPy/source/.devcontainer/devcontainer.json ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "Miniconda (Python 3)",
3
+ "build": {
4
+ "context": "..",
5
+ "dockerfile": "Dockerfile",
6
+ "args": {
7
+ "INSTALL_NODE": "false"
8
+ }
9
+ },
10
+
11
+ // Set *default* container specific settings.json values on container create.
12
+ "settings": {
13
+ "terminal.integrated.shell.linux": "/bin/bash",
14
+ "python.pythonPath": "/opt/conda/bin/python",
15
+ "python.languageServer": "Pylance",
16
+ "python.linting.enabled": true,
17
+ "python.linting.pylintEnabled": true,
18
+ "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
19
+ "python.formatting.blackPath": "/usr/local/py-utils/bin/black",
20
+ "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
21
+ "python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
22
+ "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
23
+ "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
24
+ "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
25
+ "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
26
+ "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
27
+ },
28
+
29
+ // Add the IDs of extensions you want installed when the container is created.
30
+ "extensions": [
31
+ "ms-python.python",
32
+ "ms-python.vscode-pylance"
33
+ ],
34
+
35
+ // Use 'forwardPorts' to make a list of ports inside the container available locally.
36
+ // "forwardPorts": [],
37
+
38
+ // Use 'postCreateCommand' to run commands after the container is created.
39
+ "postCreateCommand": "python -m pip install --no-deps .",
40
+
41
+ // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
42
+ "remoteUser": "vscode"
43
+ }
MetPy/source/.lgtm.yml ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ path_classifiers:
2
+ generated:
3
+ - src/metpy/io/_metar_parser.py
4
+ library:
5
+ - src/metpy/deprecation.py
6
+ test:
7
+ - exclude: src/metpy/testing.py
MetPy/source/.mailmap ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ John Leeman <kd5wxb@gmail.com>
2
+ John Leeman <kd5wxb@gmail.com> <john@leemangeophysical.com>
3
+ <rmay@ucar.edu> <rmay31@gmail.com>
4
+ <sarms@ucar.edu> <sarms@unidata.ucar.edu>
5
+ Kevin Goebbert <kevin.goebbert@valpo.edu>
6
+ <kevin.goebbert@valpo.edu> <kevin.goebbert@gmail.com>
7
+ <kevin.goebbert@valpo.edu> <kgoebber@users.noreply.github.com>
8
+ Tyler Wixtrom <tjwixtrom@gmail.com> <twixtrom@candy.unidata.ucar.edu>
9
+ Tyler Wixtrom <tjwixtrom@gmail.com>
10
+ Brian Mapes <mapes@miami.edu>
11
+ Kristen Pozsonyi <knpozson@millersville.edu>
12
+ Daryl Herzmann <akrherz@iastate.edu>
13
+ Kishan Mehta <kishan@mobifly.co.uk>
14
+ Matt Wilson <matthew.wilson@valpo.edu>
15
+ Andrew Huang <ahuang11@illinois.edu>
16
+ <ahuang11@illinois.edu> <huang.andrew12@gmail.com>
17
+ Andrew Huang <ahuang11@illinois.edu> Andrew <15331990+ahuang11@users.noreply.github.com>
18
+ <tsupinie@gmail.com> <tsupinie@ou.edu>
19
+ Tim Supinie <tsupinie@gmail.com>
20
+ Zach Bruick <zachary.bruick@valpo.edu>
21
+ Joy Monteiro <joy.merwin@gmail.com>
22
+ Steven Decker <sgdecker@envsci.rutgers.edu>
23
+ <sgdecker@envsci.rutgers.edu> <decker@envsci.rutgers.edu>
24
+ Claude Dicaire <31997745+eliteuser26@users.noreply.github.com>
25
+ David Ahijevych <ahijevyc@ucar.edu>
26
+ William Minchin <w_minchin@hotmail.com> MinchinWeb
MetPy/source/.markdownlint.yaml ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ default: true
2
+
3
+ extends: null
4
+
5
+ MD013:
6
+ line_length: 95
MetPy/source/.mdl_style.rb ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ all
2
+ rule 'MD013', :line_length => 95
3
+ rule 'MD026', :punctuation => ".,;:!"
4
+ exclude_rule 'MD002'
5
+ exclude_rule 'MD034'
6
+ exclude_rule 'MD041'
MetPy/source/.mdlrc ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ style '.mdl_style.rb'
2
+ ignore_front_matter true
MetPy/source/.qlty/qlty.toml ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ config_version = "0"
2
+
3
+ exclude_patterns = [
4
+ "src/metpy/io/_nexrad_msgs/msg*.py",
5
+ "src/metpy/io/_metar_parser*"
6
+ ]
7
+ test_patterns = ["tests/**/*.py"]
8
+
9
+ [[source]]
10
+ name = "default"
11
+ default = true
12
+
13
+ [smells.function_parameters]
14
+ threshold = 10
15
+
16
+ [smells.similar_code]
17
+ enabled = false
MetPy/source/.stickler.yml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ linters:
2
+ flake8:
3
+ ignore: 'F405'
4
+ max-line-length: 95
MetPy/source/AUTHORS.txt ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The following are the contributors of code within MetPy:
2
+ Aaron Hill
3
+ Abby Kenyon
4
+ Alex Haberlie
5
+ Andrew Huang
6
+ Assela Pathirana
7
+ Brian Mapes
8
+ Bryan Guarente
9
+ Claude Dicaire
10
+ Dan Dawson
11
+ Daniel Adriaansen
12
+ Daryl Herzmann
13
+ David Ahijevych
14
+ David Lawrence
15
+ Denis Sergeev
16
+ Drew Camron
17
+ Eric Bruning
18
+ Joern Ungermann
19
+ John Leeman
20
+ Jon Thielen
21
+ Jonathan Helmus
22
+ Joy Monteiro
23
+ Kevin Goebbert
24
+ Kishan Mehta
25
+ Kristen Pozsonyi
26
+ kstilwell
27
+ Leonardo Uieda
28
+ LProx2020
29
+ Lucas Sterzinger
30
+ Matt Wilson
31
+ Max Grover
32
+ Michael James
33
+ mmorello1
34
+ Nathan Wendt
35
+ Patrick Marsh
36
+ Raul Mendez
37
+ Rich Signell
38
+ Russell Manser
39
+ Ryan May
40
+ Sean Arms
41
+ Shawn Murdzek
42
+ Stefan Hofer
43
+ Steven Decker
44
+ Tim Supinie
45
+ TJ Turnage
46
+ Tyler Wixtrom
47
+ UCAR/Unidata
48
+ Vardan Nadkarni
49
+ Warren Pettee
50
+ Will Holmgren
51
+ William Minchin
52
+ Zach Bruick
MetPy/source/CITATION.cff ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ abstract: >
2
+ MetPy is an open-source and community-driven Python package for meteorology designed to fit well within the scientific Python stack (numpy, matplotlib, etc.).
3
+ Its goal is to bring the scripted weather analysis capabilities of GEMPAK (and tools like it) to the powerful scientific Python ecosystem.
4
+ The guiding principle is to make MetPy easy to use with any dataset that can be read into Python.
5
+ MetPy's general functionality breaks down into reading data, meteorological calculations, and meteorology-specific plotting.
6
+ authors:
7
+ - family-names: May
8
+ given-names: Ryan
9
+ affiliation: "UCAR/Unidata"
10
+ orcid: "https://orcid.org/0000-0003-2907-038X"
11
+ - family-names: Arms
12
+ given-names: Sean
13
+ affiliation: "UCAR/Unidata"
14
+ orcid: "https://orcid.org/0000-0001-9835-113X"
15
+ - family-names: Marsh
16
+ given-names: Patrick
17
+ - family-names: Bruning
18
+ given-names: Eric
19
+ affiliation: "Texas Tech University"
20
+ orcid: "https://orcid.org/0000-0003-1959-442X"
21
+ - family-names: Leeman
22
+ given-names: John
23
+ orcid: "https://orcid.org/0000-0002-3624-1821"
24
+ - family-names: Goebbert
25
+ given-names: Kevin
26
+ affiliation: "Valparaiso University"
27
+ orcid: "https://orcid.org/0000-0001-7559-2432"
28
+ - family-names: Thielen
29
+ given-names: Jonathan
30
+ orcid: "https://orcid.org/0000-0002-5479-0189"
31
+ - family-names: Bruick
32
+ given-names: Zachary
33
+ orcid: "https://orcid.org/0000-0002-0299-9845"
34
+ - family-names: Camron
35
+ given-names: "M. Drew"
36
+ affiliation: "UCAR/Unidata"
37
+ orcid: "https://orcid.org/0000-0001-7246-6502"
38
+ cff-version: "1.2.0"
39
+ contact:
40
+ - name: Unidata
41
+ city: Boulder
42
+ region: Colorado
43
+ country: US
44
+ email: "support@unidata.ucar.edu"
45
+ tel: "303-497-8643"
46
+ fax: "303-497-8690"
47
+ website: "https://www.unidata.ucar.edu"
48
+ doi: "10.5065/D6WW7G29"
49
+ keywords:
50
+ - meteorology
51
+ - weather
52
+ license: "BSD-3-Clause"
53
+ message: "If you use or contribute to MetPy, please use this information to reference it."
54
+ repository-code: "https://github.com/Unidata/MetPy"
55
+ title: "MetPy: A Python Package for Meteorological Data"
56
+ type: software
57
+ url: "https://www.unidata.ucar.edu/software/metpy/"
58
+ references:
59
+ - type: grant
60
+ authors:
61
+ - family-names: Ramamurthy
62
+ given-names: Mohan
63
+ email: "mohan@ucar.edu"
64
+ date-released: 2014-04-01
65
+ institution:
66
+ name: "National Science Foundation"
67
+ identifiers:
68
+ - description: "NSF award number."
69
+ type: other
70
+ value: "AGS-1344155"
71
+ title: "Unidata 2018: Transforming Geoscience through Innovative Data Services"
72
+ url: "https://www.nsf.gov/awardsearch/showAward?AWD_ID=1344155"
73
+ - type: grant
74
+ authors:
75
+ - family-names: May
76
+ given-names: Ryan
77
+ email: "rmay@ucar.edu"
78
+ - family-names: Goebbert
79
+ given-names: Kevin
80
+ - family-names: Leeman
81
+ given-names: John
82
+ date-released: 2017-09-01
83
+ institution:
84
+ name: "National Science Foundation"
85
+ identifiers:
86
+ - description: "NSF award number."
87
+ type: other
88
+ value: "OAC-1740315"
89
+ title: "MetPy - A Python GEMPAK Replacement for Meteorological Data Analysis"
90
+ url: "https://www.nsf.gov/awardsearch/showAward?AWD_ID=1740315"
91
+ - type: grant
92
+ authors:
93
+ - family-names: Paul
94
+ given-names: Kevin
95
+ email: "kpaul@ucar.edu"
96
+ - family-names: May
97
+ given-names: Ryan
98
+ - family-names: Hamman
99
+ given-names: Joseph
100
+ - family-names: "Del Vento"
101
+ given-names: Davide
102
+ date-released: 2017-08-21
103
+ institution:
104
+ name: "National Science Foundation"
105
+ identifiers:
106
+ - description: "NSF award number."
107
+ type: other
108
+ value: "OCE-1740633"
109
+ title: "Pangeo: An Open Source Big Data Climate Science Platform"
110
+ url: "https://www.nsf.gov/awardsearch/showAward?AWD_ID=1740633"
111
+ - type: grant
112
+ authors:
113
+ - family-names: Ramamurthy
114
+ given-names: Mohan
115
+ email: "mohan@ucar.edu"
116
+ date-released: 2019-05-01
117
+ institution:
118
+ name: "National Science Foundation"
119
+ identifiers:
120
+ - description: "NSF award number."
121
+ type: other
122
+ value: "AGS-1901712"
123
+ title: "Unidata: Next-generation Data Services and Workflows to Advance Geoscience Research and Education"
124
+ url: "https://www.nsf.gov/awardsearch/showAward?AWD_ID=1901712"
125
+ - type: grant
126
+ authors:
127
+ - family-names: May
128
+ given-names: Ryan
129
+ email: "rmay@ucar.edu"
130
+ - family-names: Goebbert
131
+ given-names: Kevin
132
+ - family-names: Camron
133
+ given-names: Michael
134
+ date-released: 2021-05-01
135
+ institution:
136
+ name: "National Science Foundation"
137
+ identifiers:
138
+ - description: "NSF award number."
139
+ type: other
140
+ value: "OAC-2103682"
141
+ title: "Elements: Scaling MetPy to Big Data Workflows in Meteorology and Climate Science"
142
+ url: "https://www.nsf.gov/awardsearch/showAward?AWD_ID=2103682"
MetPy/source/CLA.md ADDED
@@ -0,0 +1,217 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # About the Contributor License Agreement
2
+
3
+ Everybody who contributes code to MetPy is going to be asked to sign a
4
+ Contributor License Agreement (CLA). MetPy's CLA comes from
5
+ [Project Harmony](http://www.harmonyagreements.org), which is a
6
+ community-centered group focused on contributor agreements for free and open
7
+ source software.
8
+
9
+ The document you are reading now is not a legal analysis of the CLA. If you
10
+ want one of those, please talk to your lawyer. This is a description of the
11
+ purpose of the CLA.
12
+
13
+ ## Frequently Asked Questions (FAQ)
14
+
15
+ ### Why is a signed CLA required?
16
+
17
+ The license agreement is a legal document in which you state you are entitled to
18
+ contribute the code/documentation to MetPy and are willing to have it used in
19
+ distributions and derivative works. This means that should there be any kind of
20
+ legal issue in the future as to the origins and ownership of any particular
21
+ piece of code, we have the necessary forms on file from the contributor(s)
22
+ saying they were permitted to make this contribution.
23
+
24
+ The CLA also ensures that once you have provided a contribution, you cannot try
25
+ to withdraw permission for its use at a later date. People and companies can
26
+ therefore use MetPy, confident that they will not be asked to stop using pieces
27
+ of the code at a later date.
28
+
29
+ Lastly, the CLA gives the MetPy project permission to change the license under
30
+ which the project, including the various contributions from many developers, is
31
+ distributed in the future. The CLA states that this license needs to be one
32
+ that has been approved by the Open Source Initiative, including both copyleft
33
+ and permissive licenses. This just gives the freedom to adjust licenses in the
34
+ future if needed (e.g. some clause of the current license is found to be invalid).
35
+
36
+ ### Am I giving away the copyright to my contributions?
37
+
38
+ No. This is a pure license agreement, not a copyright assignment. You still
39
+ maintain the full copyright for your contributions. You are only providing a
40
+ license to MetPy to distribute your code without further restrictions. This is
41
+ not the case for all CLAs, but it is the case for the one we are using.
42
+
43
+ ### What about if I do MetPy development as part of my job?
44
+
45
+ If any of your contributions to MetPy are created as part of your employment by
46
+ somebody else, the work might not actually belong to you. It may be owned by
47
+ your employer. In that case, your employer, or somebody able to represent the
48
+ company as far as licensing goes, needs to sign the corporate version of the
49
+ Contributor Licensing Agreement in order for that contribution to be accepted
50
+ into MetPy. They will need to include the names of any developers (you and any
51
+ others covered by that agreement) who are able to submit contributions on
52
+ behalf of the employer. That list can be updated as new people are employed or
53
+ others leave.
54
+
55
+ You should also still sign an individual CLA in that case. Not all the work you
56
+ do will necessarily belong to your employer and we still need permission to
57
+ license your individual contributions.
58
+
59
+ If you have signed an individual CLA, but not a corporate one, be very careful
60
+ about submitting contributions you have made. We cannot accept anything that you
61
+ do not have the rights to license in the first place and that includes code that
62
+ belongs to your employer. Similarly, if you are a consultant who may be creating
63
+ MetPy patches as part of a job, make sure you and your employer understand who
64
+ owns the rights to the code. Only submit things that you are entitled to. The
65
+ CLA is a legal declaration by you that you have the right to grant such a
66
+ license for your contributions. It is up to you to make sure that is true.
67
+
68
+ ### Can I withdraw permission to use my contributions at a later date?
69
+
70
+ No. This is one of the reasons we require a CLA. No individual contributor can hold
71
+ such a threat over the entire community of users. Once you make a contribution, you
72
+ are saying we can use that piece of code forever.
73
+
74
+ ### Can I submit patches without having signed the CLA?
75
+
76
+ No. We will be asking all new contributors and patch submitters to sign before
77
+ they submit anything.
78
+
79
+ Based on material Copyright Django Software Foundation. [CC-BY](http://creativecommons.org/licenses/by/3.0/us/)
80
+ Modified slightly to reflect MetPy.
81
+
82
+ ## UCAR/Unidata Individual Contributor License Agreement
83
+
84
+ Thank you for your interest in contributing to UCAR/Unidata ("We" or "Us"). This contributor
85
+ agreement ("Agreement") documents the rights granted by contributors to Us. To make this
86
+ document effective, please sign it and send it to Us by electronic submission on
87
+ https://cla-assistant.io. This is a legally binding document, so please read it carefully
88
+ before agreeing to it. The Agreement may cover more than one software project managed by Us.
89
+
90
+ (1) Definitions
91
+
92
+ "You" means the individual who Submits a Contribution to Us.
93
+
94
+ "Contribution" means any work of authorship that is Submitted by You to Us in which You own or
95
+ assert ownership of the Copyright.
96
+
97
+ "Copyright" means all rights protecting works of authorship owned or controlled by You,
98
+ including copyright, moral and neighboring rights, as appropriate, for the full term of their
99
+ existence including any extensions by You.
100
+
101
+ "Material" means the work of authorship which is made available by Us to third parties. When
102
+ this Agreement covers more than one software project, the Material means the work of authorship
103
+ to which the Contribution was Submitted. After You Submit the Contribution, it may be included
104
+ in the Material.
105
+
106
+ "Submit" means any form of electronic, verbal, or written communication sent to Us or our
107
+ representatives, including but not limited to electronic mailing lists, source code control
108
+ systems, and issue tracking systems that are managed by, or on behalf of, Us for the purpose of
109
+ discussing and improving the Material, but excluding communication that is conspicuously marked
110
+ or otherwise designated in writing by You as "Not a Contribution."
111
+
112
+ "Submission Date" means the date on which You Submit a Contribution to Us.
113
+
114
+ "Effective Date" means the date You execute this Agreement or the date You first Submit a
115
+ Contribution to Us, whichever is earlier.
116
+
117
+ (2) Grant of Rights
118
+
119
+ (2.1) Copyright License
120
+
121
+ (a) You retain ownership of the Copyright in Your Contribution and have the same rights to use
122
+ or license the Contribution which You would have had without entering into the Agreement.
123
+
124
+ (b) To the maximum extent permitted by the relevant law, You grant to Us a perpetual,
125
+ worldwide, non-exclusive, transferable, royalty-free, irrevocable license under the Copyright
126
+ covering the Contribution, with the right to sublicense such rights through multiple tiers of
127
+ sublicensees, to reproduce, modify, display, perform and distribute the Contribution as part of
128
+ the Material; provided that this license is conditioned upon compliance with Section 2.3.
129
+
130
+ (2.2) Patent License
131
+
132
+ For patent claims including, without limitation, method, process, and apparatus claims which
133
+ You own, control or have the right to grant, now or in the future, You grant to Us a perpetual,
134
+ worldwide, non-exclusive, transferable, royalty-free, irrevocable patent license, with the
135
+ right to sublicense these rights to multiple tiers of sublicensees, to make, have made, use,
136
+ sell, offer for sale, import and otherwise transfer the Contribution and the Contribution in
137
+ combination with the Material (and portions of such combination). This license is granted only
138
+ to the extent that the exercise of the licensed rights infringes such patent claims; and
139
+ provided that this license is conditioned upon compliance with Section 2.3.
140
+
141
+ (2.3) Outbound License
142
+
143
+ As a condition on the grant of rights in Sections 2.1 and 2.2, We agree to license the
144
+ Contribution only under the terms of the license or licenses which We are using on the
145
+ Submission Date for the Material or any licenses which are approved by the Open Source
146
+ Initiative on or after the Effective Date, including both permissive and copyleft licenses,
147
+ whether or not such licenses are subsequently disapproved (including any right to adopt any
148
+ future version of a license if permitted).
149
+
150
+ (2.4) Moral Rights. If moral rights apply to the Contribution, to the maximum extent permitted by
151
+ law, You waive and agree not to assert such moral rights against Us or our successors in
152
+ interest, or any of our licensees, either direct or indirect.
153
+
154
+ (2.5) Our Rights. You acknowledge that We are not obligated to use Your Contribution as part of
155
+ the Material and may decide to include any Contribution We consider appropriate.
156
+
157
+ (2.6) Reservation of Rights. Any rights not expressly licensed under this section are expressly
158
+ reserved by You.
159
+
160
+ (3) Agreement
161
+
162
+ You confirm that:
163
+
164
+ (a) You have the legal authority to enter into this Agreement.
165
+
166
+ (b) You own the Copyright and patent claims covering the Contribution which are required to
167
+ grant the rights under Section 2.
168
+
169
+ (c) The grant of rights under Section 2 does not violate any grant of rights which You have
170
+ made to third parties, including Your employer. If You are an employee, You have had Your
171
+ employer approve this Agreement or sign the Entity version of this document. If You are less
172
+ than eighteen years old, please have Your parents or guardian sign the Agreement.
173
+
174
+ (4) Disclaimer
175
+
176
+ EXCEPT FOR THE EXPRESS WARRANTIES IN SECTION 3, THE CONTRIBUTION IS PROVIDED "AS IS". MORE
177
+ PARTICULARLY, ALL EXPRESS OR IMPLIED WARRANTIES INCLUDING, WITHOUT LIMITATION, ANY IMPLIED
178
+ WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE
179
+ EXPRESSLY DISCLAIMED BY YOU TO US. TO THE EXTENT THAT ANY SUCH WARRANTIES CANNOT BE DISCLAIMED,
180
+ SUCH WARRANTY IS LIMITED IN DURATION TO THE MINIMUM PERIOD PERMITTED BY LAW.
181
+
182
+ (5) Consequential Damage Waiver
183
+
184
+ TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL YOU BE LIABLE FOR ANY LOSS
185
+ OF PROFITS, LOSS OF ANTICIPATED SAVINGS, LOSS OF DATA, INDIRECT, SPECIAL, INCIDENTAL,
186
+ CONSEQUENTIAL AND EXEMPLARY DAMAGES ARISING OUT OF THIS AGREEMENT REGARDLESS OF THE LEGAL OR
187
+ EQUITABLE THEORY (CONTRACT, TORT OR OTHERWISE) UPON WHICH THE CLAIM IS BASED.
188
+
189
+ (6) Miscellaneous
190
+
191
+ (6.1) This Agreement will be governed by and construed in accordance with the laws of the state
192
+ of Colorado, excluding its conflicts of law provisions. Under certain circumstances, the
193
+ governing law in this section might be superseded by the United Nations Convention on Contracts
194
+ for the International Sale of Goods ("UN Convention") and the parties intend to avoid the
195
+ application of the UN Convention to this Agreement and, thus, exclude the application of the UN
196
+ Convention in its entirety to this Agreement.
197
+
198
+ (6.2) This Agreement sets out the entire agreement between You and Us for Your Contributions to
199
+ Us and overrides all other agreements or understandings.
200
+
201
+ (6.3) If You or We assign the rights or obligations received through this Agreement to a third
202
+ party, as a condition of the assignment, that third party must agree in writing to abide by all
203
+ the rights and obligations in the Agreement.
204
+
205
+ (6.4) The failure of either party to require performance by the other party of any provision of
206
+ this Agreement in one situation shall not affect the right of a party to require such
207
+ performance at any time in the future. A waiver of performance under a provision in one
208
+ situation shall not be considered a waiver of the performance of the provision in the future or
209
+ a waiver of the provision in its entirety.
210
+
211
+ (6.5) If any provision of this Agreement is found void and unenforceable, such provision will
212
+ be replaced to the extent possible with a provision that comes closest to the meaning of the
213
+ original provision and which is enforceable. The terms and conditions set forth in this
214
+ Agreement shall apply notwithstanding any failure of essential purpose of this Agreement or any
215
+ limited remedy to the maximum extent possible under law.
216
+
217
+ Harmony (HA-CLA-I-OSI) Version 1.0
MetPy/source/CODE_OF_CONDUCT.md ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Contributor Covenant Code of Conduct
2
+
3
+ ## Our Pledge
4
+
5
+ In the interest of fostering an open and welcoming environment, we as contributors and
6
+ maintainers pledge to making participation in our project and our community a harassment-free
7
+ experience for everyone, regardless of age, body size, disability, ethnicity, gender identity
8
+ and expression, level of experience, nationality, personal appearance, race, religion, or
9
+ sexual identity and orientation.
10
+
11
+ ## Our Standards
12
+
13
+ Examples of behavior that contributes to creating a positive environment include:
14
+
15
+ * Using welcoming and inclusive language
16
+ * Being respectful of differing viewpoints and experiences
17
+ * Gracefully accepting constructive criticism
18
+ * Focusing on what is best for the community
19
+ * Showing empathy towards other community members
20
+
21
+ Examples of unacceptable behavior by participants include:
22
+
23
+ * The use of sexualized language or imagery and unwelcome sexual attention or advances
24
+ * Trolling, insulting/derogatory comments, and personal or political attacks
25
+ * Public or private harassment
26
+ * Publishing others' private information, such as a physical or electronic address, without
27
+ explicit permission
28
+ * Other conduct which could reasonably be considered inappropriate in a professional setting
29
+
30
+ ## Our Responsibilities
31
+
32
+ Project maintainers are responsible for clarifying the standards of acceptable behavior and are
33
+ expected to take appropriate and fair corrective action in response to any instances of
34
+ unacceptable behavior.
35
+
36
+ Project maintainers have the right and responsibility to remove, edit, or reject comments,
37
+ commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of
38
+ Conduct, or to ban temporarily or permanently any contributor for other behaviors that they
39
+ deem inappropriate, threatening, offensive, or harmful.
40
+
41
+ ## Scope
42
+
43
+ This Code of Conduct applies both within project spaces and in public spaces when an individual
44
+ is representing the project or its community. Examples of representing a project or community
45
+ include using an official project e-mail address, posting via an official social media account,
46
+ or acting as an appointed representative at an online or offline event. Representation of a
47
+ project may be further defined and clarified by project maintainers.
48
+
49
+ ## Enforcement
50
+
51
+ Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
52
+ contacting the project team at support-python@unidata.ucar.edu. The project team will review
53
+ and investigate all complaints, and will respond in a way that it deems appropriate to the
54
+ circumstances. The project team is obligated to maintain confidentiality with regard to the
55
+ reporter of an incident. Further details of specific enforcement policies may be posted
56
+ separately.
57
+
58
+ Project maintainers who do not follow or enforce the Code of Conduct in good faith may face
59
+ temporary or permanent repercussions as determined by other members of the project's
60
+ leadership.
61
+
62
+ ## Attribution
63
+
64
+ This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
65
+ available at [http://contributor-covenant.org/version/1/4][version]
66
+
67
+ [homepage]: http://contributor-covenant.org
68
+ [version]: http://contributor-covenant.org/version/1/4/
MetPy/source/CONTRIBUTING.md ADDED
@@ -0,0 +1,412 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Contributors Guide
2
+
3
+ Interested in helping build MetPy? Have code from your research that you believe others will
4
+ find useful? Have a few minutes to tackle an issue? In this guide we will get you setup and
5
+ integrated into contributing to MetPy!
6
+
7
+ ## Introduction
8
+
9
+ First off, thank you for considering contributing to MetPy. MetPy is a community-driven
10
+ project. It's people like you that make MetPy useful and successful. There are many ways
11
+ to contribute, from writing tutorials or examples, improvements to the documentation,
12
+ submitting bug reports and feature requests, or even writing code which can be incorporated
13
+ into MetPy for everyone to use.
14
+
15
+ Following these guidelines helps to communicate that you respect the time of the
16
+ developers managing and developing this open source project. In return, they
17
+ should reciprocate that respect in addressing your issue, assessing changes, and
18
+ helping you finalize your pull requests.
19
+
20
+ So, please take a few minutes to read through this guide and get setup for success with your
21
+ MetPy contributions. We're glad you're here!
22
+
23
+ ## What Can I Do?
24
+
25
+ * Tackle any [issues](https://github.com/Unidata/MetPy/issues) you wish! We have a special
26
+ label for issues that beginners might want to try. Have a look at our
27
+ [current beginner issues.](https://github.com/Unidata/MetPy/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
28
+ Also have a look at if the issue is already assigned to someone - this helps us make sure
29
+ that work is not duplicated if the issue is already being worked on by Unidata Staff.
30
+ * Contribute code you already have. It does not need to be perfect! We will help you clean
31
+ things up, test it, etc.
32
+ * Make a tutorial or example of how to do something.
33
+ * Improve documentation of a feature you found troublesome.
34
+ * File a new issue if you run into problems!
35
+
36
+ ## Ground Rules
37
+
38
+ The goal is to maintain a diverse community that's pleasant for everyone. Please
39
+ be considerate and respectful of others by following our
40
+ [code of conduct](https://github.com/Unidata/MetPy/blob/main/CODE_OF_CONDUCT.md).
41
+
42
+ Other items:
43
+
44
+ * Each pull request should consist of a logical collection of changes. You can
45
+ include multiple bug fixes in a single pull request, but they should be related.
46
+ For unrelated changes, please submit multiple pull requests.
47
+ * Do not commit changes to files that are irrelevant to your feature or bug fix
48
+ (e.g., .gitignore).
49
+ * Be willing to accept criticism and work on improving your code; we don't want
50
+ to break other users' code, so care must be taken not to introduce bugs.
51
+ * Be aware that the pull request review process is not immediate, and is
52
+ generally proportional to the size of the pull request.
53
+ * Function arguments:
54
+ * Use full names for parameters rather than symbols (e.g. temperature instead of t)
55
+ * Order: pressure/height -> temperature/wind -> moisture (in general, but not a hard and
56
+ fast rule--like to allow for some default arguments).
57
+
58
+ ## Reporting a bug
59
+
60
+ The easiest way to get involved is to report issues you encounter when using MetPy or by
61
+ requesting something you think is missing.
62
+
63
+ * Head over to the [issues](https://github.com/Unidata/MetPy/issues) page.
64
+ * Search to see if your issue already exists or has even been solved previously.
65
+ * If you indeed have a new issue or request, click the "New Issue" button.
66
+ * Fill in as much of the issue template as is relevant. Please be as specific as possible.
67
+ Include the version of the code you were using, as well as what operating system you
68
+ are running. If possible, include complete, minimal example code that reproduces the problem.
69
+
70
+ ## Setting up your development environment
71
+
72
+ We recommend using the [conda](https://conda.io/docs/) package manager for your Python environments.
73
+ This requires some comfort with the command line and a little ``git`` knowledge.
74
+ Our recommended setup for contributing:
75
+
76
+ Install [miniconda](https://docs.conda.io/en/latest/miniconda.html) on your system.
77
+ You may have to restart your prompt for the remaining steps to work.
78
+
79
+ Install [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
80
+ (link with instructions) on your system if not already available
81
+ (check with ``git --version`` at the command line.)
82
+ This can also be installed from a variety of package managers, including ``conda`` if needed.
83
+
84
+ Login to your [GitHub](https://github.com) account and make a fork of the
85
+ [MetPy repository](https://github.com/unidata/metpy/) by clicking the "Fork" button.
86
+ Clone your fork of the MetPy repository (in terminal on Mac/Linux or git shell/GUI on Windows)
87
+ to the location you'd like to keep it.
88
+ We are partial to creating a ``git_repos`` or ``projects`` directory in our home folder.
89
+
90
+ ```sh
91
+ git clone https://github.com/<your-user-name>/metpy.git
92
+ ```
93
+
94
+ Navigate to that folder in the terminal or in Anaconda Prompt if you're on Windows.
95
+ The remainder of the instructions will take place within this directory.
96
+
97
+ ```sh
98
+ cd metpy
99
+ ```
100
+
101
+ Connect your repository to the upstream (main project).
102
+
103
+ ```sh
104
+ git remote add unidata https://github.com/unidata/metpy.git
105
+ ```
106
+
107
+ Create a new conda environment for us to configure, and give it a name.
108
+ After ``-n`` you can specify any name you'd like; here we've chosen ``devel``.
109
+
110
+ ```sh
111
+ conda create -n devel
112
+ ```
113
+
114
+ **IMPORTANT**: Always activate this environment when developing and testing your changes!
115
+
116
+ ```sh
117
+ conda activate devel
118
+ ```
119
+
120
+ You will have to do this any time you re-open your prompt.
121
+ Currently there are no packages in this environment, let's change that.
122
+ Configure this environment so that we can reach
123
+ [conda-forge](https://conda-forge.org/feedstock-outputs/) for the specific packages we depend on.
124
+
125
+ ```sh
126
+ conda config --env --add channels conda-forge --add channels conda-forge/label/testing
127
+ ```
128
+
129
+ Install the necessary dependency packages from conda-forge.
130
+ Remember that these must be executed within the ``metpy`` directory.
131
+
132
+ ```sh
133
+ conda install --file ci/requirements.txt --file ci/extra_requirements.txt --file ci-dev/test_requirements.txt
134
+ ```
135
+
136
+ Finally, create an editable install of MetPy that will update with your development!
137
+
138
+ ```sh
139
+ pip install -e .
140
+ ```
141
+
142
+ Note sections on [documentation](#documentation) and [code style](#code-style) below,
143
+ where you may need to install a few more packages into your new environment.
144
+
145
+ Now you're all set!
146
+ You have an environment called ``devel`` that you can work in.
147
+ Remember, you will need to activate this environment the
148
+ next time you want to use it after closing the terminal.
149
+ If you want to get back to the root environment, run ``conda deactivate``.
150
+
151
+ ## Pull Requests
152
+
153
+ The changes to the MetPy source (and documentation) should be made via GitHub pull requests
154
+ against ``main``, even for those with administration rights. While it's tempting to
155
+ make changes directly to ``main`` and push them up, it is better to make a pull request so
156
+ that others can give feedback. If nothing else, this gives a chance for the automated tests to
157
+ run on the PR. This can eliminate "brown paper bag" moments with buggy commits on the main
158
+ branch.
159
+
160
+ During the Pull Request process, before the final merge, it's a good idea to rebase the branch
161
+ and squash together smaller commits. It's not necessary to flatten the entire branch, but it
162
+ can be nice to eliminate small fixes and get the merge down to logically arranged commits. This
163
+ can also be used to hide sins from history--this is the only chance, since once it hits
164
+ ``main``, it's there forever!
165
+
166
+ **Working on your first Pull Request?** You can learn how from this *free* video series
167
+ [How to Contribute to an Open Source Project on GitHub](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github),
168
+ Aaron Meurer's [tutorial on the git workflow](https://www.asmeurer.com/git-workflow/), or the
169
+ guide ["How to Contribute to Open Source"](https://opensource.guide/how-to-contribute/).
170
+
171
+ Commit the changes you made. Chris Beams has written a [guide](https://cbea.ms/git-commit/)
172
+ on how to write good commit messages.
173
+
174
+ Push to your fork and [submit a pull request](https://github.com/Unidata/metpy/compare/).
175
+ For the Pull Request to be accepted, you need to agree to the
176
+ MetPy Contributor License Agreement (CLA). This will be handled automatically
177
+ upon submission of a Pull Request.
178
+ See [here](https://github.com/Unidata/MetPy/blob/main/CLA.md) for more
179
+ explanation and rationale behind MetPy's CLA.
180
+
181
+ ## Source Code
182
+
183
+ MetPy's source code is located in the `src/` directory in the root of the repository. Within
184
+ `src/` is the `metpy/` directory, which is the base package. Inside here are the main
185
+ top-level subpackages of MetPy:
186
+
187
+ * `calc`: Calculations and tools
188
+ * `interpolate`: Interpolating data points to other locations
189
+ * `io`: Tools for reading and writing files
190
+ * `plots`: Plotting tools using Matplotlib (and Cartopy)
191
+
192
+ ## Documentation
193
+
194
+ Now that you've made your awesome contribution, it's time to tell the world how to use it.
195
+ Writing documentation strings is really important to make sure others use your functionality
196
+ properly. Didn't write new functions? That's fine, but be sure that the documentation for
197
+ the code you touched is still in great shape. It is not uncommon to find some strange wording
198
+ or clarification that you can take care of while you are here. If you added a new function
199
+ make sure that it gets marked as included if appropriate in the GEMPAK conversion table.
200
+
201
+ You can write examples in the documentation if they are simple concepts to demonstrate. If
202
+ your feature is more complex, consider adding to the examples or tutorials for MetPy.
203
+
204
+ You can build the documentation locally to see how your changes will look.
205
+ After setting up your [development environment](#setting-up-your-development-environment) above,
206
+ from within the ``metpy`` directory with your ``devel`` environment active,
207
+ use ``conda install --file ci-dev/doc_requirements.txt``
208
+ to install required packages to build our documentation.
209
+ Then, still from within your ``devel`` environment,
210
+
211
+ * Navigate to the docs folder ``cd docs``
212
+ * Remove any old builds and build the current docs ``make clean html``
213
+ * (Try ``make cleanall html`` if ``make clean html`` fails)
214
+ * Open ``docs/build/html/index.html`` and see your changes!
215
+
216
+ ### `doc-server.py`
217
+
218
+ The MetPy documentation relies on the
219
+ [Pydata Sphinx Theme](https://pydata-sphinx-theme.readthedocs.io/en/stable/index.html)
220
+ for style and functionality.
221
+ The theme includes some live javascript elements, including the version switcher.
222
+ To test these elements, use our `doc-server.py` to deploy the built docs html files to a local
223
+ server. If testing changes to `pst-versions.json` locally, change
224
+ `html_theme_options['switcher']['json_url']` to reference `/MetPy/pst-versions.json` and the
225
+ builds should pass and reflect any testing changes to `docs/test-server/pst-versions.json`.
226
+ Documentation builds may fail if the links in the json fail to resolve.
227
+ Change `html_theme_options['check_switcher']` to `False` in `conf.py` to bypass this behavior.
228
+ Note: for production, `pst-versions.json` must live on the online MetPy documentation
229
+ (via the `gh-pages` branch on GitHub), where it is automatically updated.
230
+
231
+ ## Tests
232
+
233
+ Unit tests are the lifeblood of the project, as it ensures that we can continue to add and
234
+ change the code and stay confident that things have not broken. Running the tests requires
235
+ ``pytest``, which is easily available through ``conda``, ``pip``, or after installing the
236
+ `test` extras with `uv`. It was also installed if you made our default ``devel``
237
+ environment.
238
+
239
+ ### Running Tests
240
+
241
+ Running the tests can be done by running ``pytest``
242
+
243
+ Running the whole test suite isn't that slow, but can be a burden if you're working on just
244
+ one module or a specific test. It is easy to run tests on a single directory:
245
+
246
+ ```sh
247
+ pytest tests/calc
248
+ ```
249
+
250
+ A specific test can be run as:
251
+
252
+ ```sh
253
+ pytest -k test_my_test_func_name
254
+ ```
255
+
256
+ ### Writing Tests
257
+
258
+ Tests should ideally hit all of the lines of code added or changed. We have automated
259
+ services that can help track down lines of code that are missed by tests. Watching the
260
+ coverage has even helped find sections of dead code that could be removed!
261
+
262
+ Let's say we are adding a simple function to add two numbers and return the result as a float
263
+ or as a string. (This would be a silly function, but go with us here for demonstration
264
+ purposes.)
265
+
266
+ ```python
267
+ def add_as_float_or_string(a, b, as_string=False):
268
+ res = a + b
269
+ if as_string:
270
+ return str(res)
271
+ return res
272
+ ```
273
+
274
+ I can see two easy tests here: one for the results as a float and one for the results as a
275
+ string. If I had added this to the ``calc`` module, I'd add those two tests in
276
+ ``tests/calc/test_calc.py``.
277
+
278
+ ```python
279
+ def test_add_as_float_or_string_defaults():
280
+ res = add_as_float_or_string(3, 4)
281
+ assert res == 7
282
+
283
+
284
+ def test_add_as_float_or_string_string_return():
285
+ res = add_as_float_or_string(3, 4, as_string=True)
286
+ assert res == '7'
287
+ ```
288
+
289
+ There are plenty of more advanced testing concepts, like dealing with floating point
290
+ comparisons, parameterizing tests, testing that exceptions are raised, and more. Have a look
291
+ at the existing tests to get an idea of some of the common patterns.
292
+
293
+ ### Image tests
294
+
295
+ Some tests (for matplotlib plotting code) are done as an image comparison, using the
296
+ pytest-mpl plugin.
297
+ By following the [guide](#setting-up-your-development-environment) above,
298
+ you should have a ``testing`` install of matplotlib, which will guarantee that your image tests
299
+ behave exactly the same as ours.
300
+ To run these tests, use:
301
+
302
+ ```sh
303
+ pytest --mpl
304
+ ```
305
+
306
+ When adding new image comparison tests, start by creating the baseline images for the tests:
307
+
308
+ ```sh
309
+ pytest --mpl-generate-path=baseline
310
+ ```
311
+
312
+ That command runs the tests and saves the images in the ``baseline`` directory.
313
+ For MetPy this is generally ``tests/plots/baseline/``. We recommend using the ``-k`` flag
314
+ to run only the test you just created for this step.
315
+
316
+ For more information, see the [docs for pytest-mpl](https://github.com/astrofrog/pytest-mpl).
317
+
318
+ ## Cached Data Files
319
+
320
+ MetPy keeps some test data, as well as things like shape files for US counties in a data cache
321
+ supported by the pooch library. To add files to this, please ensure they are as small as
322
+ possible. Put the files in the `staticdata` directory. Then run this command in the root of
323
+ the MetPy repository to recreate the data registry:
324
+
325
+ ```sh
326
+ python -c "import pooch; pooch.make_registry('staticdata', 'src/metpy/static-data-manifest.txt')"
327
+ ```
328
+
329
+ Make sure that no system files (like `.DS_Store`) are in the manifest and add it to your
330
+ contribution.
331
+
332
+ ## Code Style
333
+
334
+ MetPy uses the Python code style outlined in [PEP8](https://pep8.org). For better or worse, this
335
+ is what the majority of the Python world uses. The one deviation is that line length limit is
336
+ 95 characters. 80 is a good target, but sometimes longer lines are needed.
337
+
338
+ While the authors are no fans of blind adherence to style and so-called project "clean-ups"
339
+ that go through and correct code style, MetPy has adopted this style from the outset.
340
+ Therefore, it makes sense to enforce this style as code is added to keep everything clean and
341
+ uniform. To this end, part of the automated testing for MetPy checks style. To check style
342
+ locally within the source directory you can use the [ruff](https://docs.astral.sh/ruff/) and
343
+ [flake8](https://flake8.pycqa.org/en/latest/) tools.
344
+ After setting up your [development environment](#setting-up-your-development-environment) above,
345
+ from within the ``metpy`` directory with your ``devel`` environment active,
346
+ install the code style tools we use with
347
+ ``conda install --file ci-dev/linting_requirements.txt``. Checking your code style is then as
348
+ easy as running ``ruff check . ; flake8 .`` in the base of the repository.
349
+
350
+ You can also just submit your PR and the kind robots will comment on all style violations as
351
+ well. It can be a pain to make sure you have the right number of spaces around things, imports
352
+ in order, and all of the other nits that the bots will find. It is very important though as
353
+ this consistent style helps us keep MetPy readable, maintainable, and uniform.
354
+
355
+ ## What happens after the pull request
356
+
357
+ You've made your changes, documented them, added some tests, and submitted a pull request.
358
+ What now?
359
+
360
+ ### Automated Testing
361
+
362
+ First, our army of never sleeping robots will begin a series of automated checks.
363
+ The test suite, documentation, style, and more will be checked on various versions of Python
364
+ with current and legacy packages. Travis CI and GitHub Actions will run testing on Linux,
365
+ Mac, and Windows. Other services will kick in and check if there is a drop in code coverage
366
+ or any style variations that should be corrected. If you see a red mark by a service, something
367
+ failed and clicking the "Details" link will give you more information. We're happy to help if
368
+ you are stuck.
369
+
370
+ The robots can be difficult to satisfy, but they are there to help everyone write better code.
371
+ In some cases, there will be exceptions to their suggestions, but these are rare. If you make
372
+ changes to your code and push again, the tests will automatically run again.
373
+
374
+ ### Code Review
375
+
376
+ At this point you're waiting on us. You should expect to hear at least a comment within a
377
+ couple of days. We may suggest some changes or improvements or alternatives.
378
+
379
+ Some things that will increase the chance that your pull request is accepted quickly:
380
+
381
+ * Write tests.
382
+ * Follow [PEP8](https://pep8.org) for style. (The `flake8` utility can help with this.)
383
+ * Write a [good commit message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
384
+
385
+ Pull requests will automatically have tests run by Travis. This includes
386
+ running both the unit tests as well as the `flake8` code linter.
387
+
388
+ ### Merging
389
+
390
+ Once we're all happy with the pull request, it's time for it to get merged in. Only the
391
+ maintainers can merge pull requests and you should never merge a pull request you have commits
392
+ on as it circumvents the code review. If this is your first or second pull request, we'll
393
+ likely help by rebasing and cleaning up the commit history for you. As your development skills
394
+ increase, we'll help you learn how to do this.
395
+
396
+ ## More Questions?
397
+
398
+ If you're stuck somewhere or are interested in being a part of the community in
399
+ other ways, feel free to contact us:
400
+
401
+ * [MetPy's Gitter Channel](https://gitter.im/Unidata/MetPy)
402
+ * ["metpy" tag on Stack Overflow](https://stackoverflow.com/questions/tagged/metpy)
403
+ * [Unidata's Python support address](mailto:support-python@unidata.ucar.edu)
404
+ * [python-users](https://mailinglists.unidata.ucar.edu/?software) mailing list
405
+
406
+ ## Further Reading
407
+
408
+ There are a ton of great resources out there on contributing to open source and on the
409
+ importance of writing tested and maintainable software.
410
+
411
+ * [How to Contribute to Open Source Guide](https://opensource.guide/how-to-contribute/)
412
+ * [Zen of Scientific Software Maintenance](https://jrleeman.github.io/ScientificSoftwareMaintenance/)
MetPy/source/LICENSE ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ BSD 3-Clause License
2
+
3
+ Copyright (c) 2008-2025, MetPy Developers
4
+ All rights reserved.
5
+
6
+ Redistribution and use in source and binary forms, with or without
7
+ modification, are permitted provided that the following conditions are met:
8
+
9
+ * Redistributions of source code must retain the above copyright notice, this
10
+ list of conditions and the following disclaimer.
11
+
12
+ * Redistributions in binary form must reproduce the above copyright notice,
13
+ this list of conditions and the following disclaimer in the documentation
14
+ and/or other materials provided with the distribution.
15
+
16
+ * Neither the name of the copyright holder nor the names of its
17
+ contributors may be used to endorse or promote products derived from
18
+ this software without specific prior written permission.
19
+
20
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MetPy/source/MANIFEST.in ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ prune staticdata
2
+ prune talks
3
+ exclude docs/_static/*.pdf
MetPy/source/README.md ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MetPy
2
+ =====
3
+
4
+ [![MetPy Logo](https://github.com/Unidata/MetPy/raw/main/docs/_static/metpy_150x150.png)](https://unidata.github.io/MetPy/)
5
+ [![NSF Unidata Logo](https://github.com/Unidata/MetPy/raw/main/docs/_static/NSF-Unidata_lockup_horizontal_2024.png)](https://www.unidata.ucar.edu)
6
+
7
+ [![License](https://img.shields.io/pypi/l/metpy.svg)](https://pypi.python.org/pypi/MetPy/)
8
+ [![Gitter](https://badges.gitter.im/Unidata/MetPy.svg)](https://gitter.im/Unidata/MetPy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
9
+ [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=round-square)](https://egghead.io/series/how-to-contribute-to-an-open-source-project-on-github)
10
+
11
+ [![Latest Docs](https://github.com/Unidata/MetPy/workflows/Build%20Docs/badge.svg)](http://unidata.github.io/MetPy)
12
+ [![PyPI Package](https://img.shields.io/pypi/v/metpy.svg)](https://pypi.python.org/pypi/MetPy/)
13
+ [![Conda Package](https://anaconda.org/conda-forge/metpy/badges/version.svg)](https://anaconda.org/conda-forge/metpy)
14
+ [![PyPI Downloads](https://img.shields.io/pypi/dm/metpy.svg)](https://pypi.python.org/pypi/MetPy/)
15
+ [![Conda Downloads](https://anaconda.org/conda-forge/metpy/badges/downloads.svg)](https://anaconda.org/conda-forge/metpy)
16
+
17
+ [![PyPI Tests](https://github.com/Unidata/MetPy/workflows/PyPI%20Tests/badge.svg)](https://github.com/Unidata/MetPy/actions?query=workflow%3A%22PyPI+Tests%22)
18
+ [![Conda Tests](https://github.com/Unidata/MetPy/workflows/Conda%20Tests/badge.svg)](https://github.com/Unidata/MetPy/actions?query=workflow%3A%22Conda+Tests%22)
19
+ [![Code Coverage Status](https://codecov.io/github/Unidata/MetPy/coverage.svg?branch=main)](https://codecov.io/github/Unidata/MetPy?branch=main)
20
+ [![Codacy Badge](https://app.codacy.com/project/badge/Grade/2e64843f595c42e991457cb76fcfa769)](https://www.codacy.com/gh/Unidata/MetPy/dashboard)
21
+ [![asv](https://img.shields.io/badge/benchmarked%20by-asv-blue.svg?style=flat)](https://unidata.github.io/MetPy-benchmark)
22
+ [![Maintainability](https://qlty.sh/gh/Unidata/projects/MetPy/maintainability.svg)](https://qlty.sh/gh/Unidata/projects/MetPy)
23
+
24
+ MetPy is a collection of tools in Python for reading, visualizing and
25
+ performing calculations with weather data.
26
+
27
+ MetPy follows [semantic versioning](https://semver.org) in its version number. This means
28
+ that any MetPy ``1.x`` release will be backwards compatible with an earlier ``1.y`` release. By
29
+ "backward compatible", we mean that **correct** code that works on a ``1.y`` version will work
30
+ on a future ``1.x`` version.
31
+
32
+ For additional MetPy examples not included in this repository, please see the
33
+ [MetPy Cookbook on Project Pythia](https://projectpythia.org/metpy-cookbook/index.html).
34
+
35
+ We support Python >= 3.11.
36
+
37
+ Need Help?
38
+ ----------
39
+
40
+ Need help using MetPy? Found an issue? Have a feature request? Checkout our
41
+ [support page](https://github.com/Unidata/MetPy/blob/main/SUPPORT.md).
42
+
43
+ Important Links
44
+ ---------------
45
+
46
+ - [HTML Documentation](http://unidata.github.io/MetPy)
47
+ - [MetPy Cookbook on Project Pythia](https://projectpythia.org/metpy-cookbook/index.html)
48
+ - "metpy" tagged questions on [Stack Overflow](https://stackoverflow.com/questions/tagged/metpy)
49
+ - [Gitter chat room](https://gitter.im/Unidata/MetPy)
50
+
51
+ Dependencies
52
+ ------------
53
+
54
+ Other required packages:
55
+
56
+ - Numpy
57
+ - Scipy
58
+ - Matplotlib
59
+ - Pandas
60
+ - Pint
61
+ - Xarray
62
+
63
+ There is also an optional dependency on the pyproj library for geographic
64
+ projections (used with cross sections, grid spacing calculation, and the GiniFile interface).
65
+
66
+ See the [installation guide](https://unidata.github.io/MetPy/latest/userguide/installguide.html)
67
+ for more information.
68
+
69
+ Code of Conduct
70
+ ---------------
71
+
72
+ We want everyone to feel welcome to contribute to MetPy and participate in discussions. In that
73
+ spirit please have a look at our [Code of Conduct](https://github.com/Unidata/MetPy/blob/main/CODE_OF_CONDUCT.md).
74
+
75
+ Contributing
76
+ ------------
77
+
78
+ **Imposter syndrome disclaimer**: We want your help. No, really.
79
+
80
+ There may be a little voice inside your head that is telling you that you're not ready to be
81
+ an open source contributor; that your skills aren't nearly good enough to contribute. What
82
+ could you possibly offer a project like this one?
83
+
84
+ We assure you - the little voice in your head is wrong. If you can write code at all,
85
+ you can contribute code to open source. Contributing to open source projects is a fantastic
86
+ way to advance one's coding skills. Writing perfect code isn't the measure of a good developer
87
+ (that would disqualify all of us!); it's trying to create something, making mistakes, and
88
+ learning from those mistakes. That's how we all improve, and we are happy to help others learn.
89
+
90
+ Being an open source contributor doesn't just mean writing code, either. You can help out by
91
+ writing documentation, tests, or even giving feedback about the project (and yes - that
92
+ includes giving feedback about the contribution process). Some of these contributions may be
93
+ the most valuable to the project as a whole, because you're coming to the project with fresh
94
+ eyes, so you can see the errors and assumptions that seasoned contributors have glossed over.
95
+
96
+ For more information, please see the [contributing guide](https://github.com/Unidata/MetPy/blob/main/CONTRIBUTING.md).
97
+
98
+ Philosophy
99
+ ----------
100
+
101
+ The space MetPy aims for is GEMPAK (and maybe NCL)-like functionality, in a way that plugs
102
+ easily into the existing scientific Python ecosystem (numpy, scipy, matplotlib). So, if you
103
+ take the average GEMPAK script for a weather map, you need to:
104
+
105
+ - read data
106
+ - calculate a derived field
107
+ - show on a map/skew-T
108
+
109
+ One of the benefits hoped to achieve over GEMPAK is to make it easier to use these routines for
110
+ any meteorological Python application; this means making it easy to pull out the LCL
111
+ calculation and just use that, or reuse the Skew-T with your own data code. MetPy also prides
112
+ itself on being well-documented and well-tested, so that on-going maintenance is easily
113
+ manageable.
114
+
115
+ The intended audience is that of GEMPAK: researchers, educators, and anyone wanting to script
116
+ up weather analysis. It doesn't even have to be scripting; all Python meteorology tools are
117
+ hoped to be able to benefit from MetPy. Conversely, it's hoped to be the meteorological
118
+ equivalent of the audience of scipy/scikit-learn/skimage.
MetPy/source/SUPPORT.md ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Support
2
+
3
+ We want to help you use MetPy and solve any challenges you encounter. There
4
+ are many ways to contact us:
5
+
6
+ ## Help using MetPy
7
+
8
+ Need some help using MetPy to solve your problem? The MetPy development team is following all
9
+ of these, so you only need to pick one to share your problem. 😉
10
+
11
+ * [Stack Overflow](https://stackoverflow.com/questions/tagged/metpy): Ask a question using the
12
+ "metpy" tag. This is the highly preferred option as it allows the community to benefit from
13
+ answers to the questions, forming a readily-searched knowledge base. It's also likely to
14
+ result in the quickest response, as not only are the MetPy developers watching, but
15
+ community members can also chime in if they know the answer.
16
+ * [GitHub Discussions](https://github.com/Unidata/MetPy/discussions): Ask your question and
17
+ have a discussion with members of MetPy's community. This is also a forum likely to result
18
+ in a quicker response.
19
+ * [Unidata Python Support email](mailto:support-python@unidata.ucar.edu): Send an email to
20
+ Unidata's Python support email address.
21
+ * [Gitter](https://gitter.im/Unidata/MetPy): text-based chat with the developers; sign in
22
+ using GitHub or Twitter.
23
+ * [MetPy on Bluesky](https://bsky.app/profile/metpy.bsky.social)
24
+ * [MetPy on Twitter](https://twitter.com/MetPy)
25
+ * [Unidata Python User’s Mailing List](https://mailinglists.unidata.ucar.edu/?software)
26
+
27
+ ## Issues
28
+
29
+ Find a problem with MetPy? Looking for a feature we don't have? File an issue!
30
+
31
+ * [New Issue](https://github.com/Unidata/MetPy/issues/new/choose)
32
+ * [Current List of Issues](https://github.com/Unidata/MetPy/issues)
33
+
34
+ ## Code of Conduct
35
+
36
+ We want everyone to feel welcome to contribute to MetPy and participate in discussions. In that
37
+ spirit please have a look at our
38
+ [`code of conduct`](https://github.com/Unidata/MetPy/blob/main/CODE_OF_CONDUCT.md).
MetPy/source/__init__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ MetPy Project Package Initialization File
4
+ """
MetPy/source/benchmarks/Dockerfile ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ FROM python:3.12
2
+
3
+ RUN pip install --no-cache-dir netcdf4 asv pysu metpy
4
+
5
+ COPY --chmod=700 entrypoint.sh /
6
+
7
+ ENTRYPOINT ["/entrypoint.sh"]
MetPy/source/benchmarks/Jenkinsfile ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ pipeline {
2
+ agent { label 'main' }
3
+ environment {
4
+ CLONE_DIR = "temp_repo_results"
5
+ }
6
+ stages {
7
+ // checks out the results repo using secret stored in Jenkins
8
+ stage('Checkout results repo') {
9
+ steps {
10
+ sh 'git config --global credential.helper cache'
11
+ sh 'git config --global push.default simple'
12
+ checkout scmGit(branches: [[name: '*/main']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'temp_repo_results'], cloneOption(depth: 1, noTags: false, reference: '', shallow: true)], userRemoteConfigs: [[credentialsId: 'GH_DEPLOY_KEY_METPY_BENCH_RESULTS', url: 'git@github.com:Unidata/MetPy-benchmark.git']])
13
+ }
14
+ }
15
+ // copies past results into the asv/results folder on the main repo
16
+ stage('Copy past results') {
17
+ steps {
18
+ sh '''
19
+ if [ -d ${CLONE_DIR}/results ]; then
20
+ echo "-------Copying results--------"
21
+ cp -r ${CLONE_DIR}/results/* benchmarks/asv/results
22
+ fi
23
+ '''
24
+ }
25
+ }
26
+ // generates the hashes to run and stores them in a text file
27
+ stage('Setup for ASV run') {
28
+ steps {
29
+ sh '''
30
+ cd benchmarks
31
+ bash generate_hashes.sh
32
+ cd ..
33
+ '''
34
+ }
35
+ }
36
+ // Runs ASV in the docker container
37
+ // The catch error ensures that the build works even if some ASV fail
38
+ stage('Run ASV') {
39
+ steps {
40
+ catchError(buildResult: 'SUCCESS') {
41
+ sh '''
42
+ cd benchmarks
43
+ docker build -t metpy-benchmarks:latest .
44
+ cd ..
45
+ docker run --rm -v .:/container-benchmarks --hostname Docker_Container -e DUID=$(id -u) -e DGID=$(id -g) metpy-benchmarks:latest benchmark
46
+ '''
47
+ }
48
+ }
49
+ }
50
+ // Copies results from the asv/results into the results repo
51
+ stage('Copy results') {
52
+ steps{
53
+ sh '''
54
+ if [ -d "${CLONE_DIR}/results" ]; then
55
+ echo "--------results repo exist-------"
56
+ else
57
+ mkdir ${CLONE_DIR}/results
58
+ fi
59
+ cp -r benchmarks/asv/results/* ${CLONE_DIR}/results
60
+ '''
61
+ }
62
+ }
63
+ // Pushes to the git repo if there have been changes
64
+ stage('Update results repo') {
65
+ steps {
66
+ withCredentials([sshUserPrivateKey(credentialsId: 'GH_DEPLOY_KEY_METPY_BENCH_RESULTS', keyFileVariable: 'deploy_key')]) {
67
+ sh '''
68
+ if [ -n "$(git status --porcelain)" ]; then
69
+ cd ${CLONE_DIR}
70
+ git add --all
71
+ git commit -m "Jenkins Updating Benchmark Results BUILD-NUMBER:${BUILD_NUMBER}" || echo "-----no changes to commit-----"
72
+ export GIT_SSH_COMMAND="ssh -i ${deploy_key}"
73
+ git push origin HEAD:main --force
74
+ fi
75
+ '''
76
+ }
77
+ }
78
+ }
79
+ }
80
+ post {
81
+ // always removes the temporary repo regardless of build status
82
+ always {
83
+ echo "---Cleaning up temporary repo---"
84
+ sh 'rm -rf "${CLONE_DIR}"'
85
+ }
86
+ }
87
+ }
MetPy/source/benchmarks/asv.conf.json ADDED
@@ -0,0 +1,208 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ // The version of the config file format. Do not change, unless
3
+ // you know what you are doing.
4
+ "version": 1,
5
+
6
+ // The name of the project being benchmarked
7
+ "project": "metpy",
8
+
9
+ // The project's homepage
10
+ "project_url": "https://unidata.github.io/MetPy/latest/",
11
+
12
+ // The URL or local path of the source code repository for the
13
+ // project being benchmarked
14
+ "repo": "..",
15
+
16
+ // The Python project's subdirectory in your repo. If missing or
17
+ // the empty string, the project is assumed to be located at the root
18
+ // of the repository.
19
+ //"repo_subdir": "benchmarks",
20
+
21
+ // Customizable commands for building the project.
22
+ // See asv.conf.json documentation.
23
+ // To build the package using pyproject.toml (PEP518), uncomment the following lines
24
+ "build_command": [
25
+ "python -m pip install build",
26
+ "python -m build",
27
+ "python -mpip wheel -w {build_cache_dir} {build_dir}"
28
+ ],
29
+ // To build the package using setuptools and a setup.py file, uncomment the following lines
30
+ // "build_command": [
31
+ // "python setup.py build",
32
+ // "python -mpip wheel -w {build_cache_dir} {build_dir}"
33
+ // ],
34
+
35
+ // Customizable commands for installing and uninstalling the project.
36
+ // See asv.conf.json documentation.
37
+ "install_command": ["in-dir={env_dir} python -mpip install {build_dir}"],
38
+ "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"],
39
+
40
+ // List of branches to benchmark. If not provided, defaults to "main"
41
+ // (for git) or "default" (for mercurial).
42
+ "branches": ["HEAD"], // for git
43
+ // "branches": ["default"], // for mercurial
44
+
45
+ // The DVCS being used. If not set, it will be automatically
46
+ // determined from "repo" by looking at the protocol in the URL
47
+ // (if remote), or by looking for special directories, such as
48
+ // ".git" (if local).
49
+ // "dvcs": "git",
50
+
51
+ // The tool to use to create environments. May be "conda",
52
+ // "virtualenv", "mamba" (above 3.8)
53
+ // or other value depending on the plugins in use.
54
+ // If missing or the empty string, the tool will be automatically
55
+ // determined by looking for tools on the PATH environment
56
+ // variable.
57
+ "environment_type": "virtualenv",
58
+
59
+ // timeout in seconds for installing any dependencies in environment
60
+ // defaults to 10 min
61
+ //"install_timeout": 600,
62
+
63
+ // the base URL to show a commit for the project.
64
+ "show_commit_url": "http://github.com/unidata/metpy/commit/",
65
+
66
+ // The Pythons you'd like to test against. If not provided, defaults
67
+ // to the current version of Python used to run `asv`.
68
+ //"pythons": ["3.8", "3.12"],
69
+
70
+ // The list of conda channel names to be searched for benchmark
71
+ // dependency packages in the specified order
72
+ "conda_channels": ["conda-forge"],
73
+
74
+ // A conda environment file that is used for environment creation.
75
+ // "conda_environment_file": "environment.yml",
76
+
77
+ // The matrix of dependencies to test. Each key of the "req"
78
+ // requirements dictionary is the name of a package (in PyPI) and
79
+ // the values are version numbers. An empty list or empty string
80
+ // indicates to just test against the default (latest)
81
+ // version. null indicates that the package is to not be
82
+ // installed. If the package to be tested is only available from
83
+ // PyPi, and the 'environment_type' is conda, then you can preface
84
+ // the package name by 'pip+', and the package will be installed
85
+ // via pip (with all the conda available packages installed first,
86
+ // followed by the pip installed packages).
87
+ //
88
+ // The ``@env`` and ``@env_nobuild`` keys contain the matrix of
89
+ // environment variables to pass to build and benchmark commands.
90
+ // An environment will be created for every combination of the
91
+ // cartesian product of the "@env" variables in this matrix.
92
+ // Variables in "@env_nobuild" will be passed to every environment
93
+ // during the benchmark phase, but will not trigger creation of
94
+ // new environments. A value of ``null`` means that the variable
95
+ // will not be set for the current combination.
96
+ //
97
+ "matrix": {
98
+ "req": {
99
+ "matplotlib": [
100
+ ],
101
+ "numpy": [
102
+ ],
103
+ "pandas": [
104
+ ],
105
+ "pint": [
106
+ ],
107
+ "pooch": [
108
+ ],
109
+ "pyproj": [
110
+ ],
111
+ "scipy": [
112
+ ],
113
+ "traitlets": [
114
+ ],
115
+ "xarray": [
116
+ ],
117
+ "netcdf4": [
118
+ ],
119
+ }
120
+ },
121
+ // Combinations of libraries/python versions can be excluded/included
122
+ // from the set to test. Each entry is a dictionary containing additional
123
+ // key-value pairs to include/exclude.
124
+ //
125
+ // An exclude entry excludes entries where all values match. The
126
+ // values are regexps that should match the whole string.
127
+ //
128
+ // An include entry adds an environment. Only the packages listed
129
+ // are installed. The 'python' key is required. The exclude rules
130
+ // do not apply to includes.
131
+ //
132
+ // In addition to package names, the following keys are available:
133
+ //
134
+ // - python
135
+ // Python version, as in the *pythons* variable above.
136
+ // - environment_type
137
+ // Environment type, as above.
138
+ // - sys_platform
139
+ // Platform, as in sys.platform. Possible values for the common
140
+ // cases: 'linux2', 'win32', 'cygwin', 'darwin'.
141
+ // - req
142
+ // Required packages
143
+ // - env
144
+ // Environment variables
145
+ // - env_nobuild
146
+ // Non-build environment variables
147
+ //
148
+ // "exclude": [
149
+ // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
150
+ // {"environment_type": "conda", "req": {"six": null}}, // don't run without six on conda
151
+ // {"env": {"ENV_VAR_1": "val2"}}, // skip val2 for ENV_VAR_1
152
+ // ],
153
+ //
154
+ // "include": [
155
+ // // additional env for python3.12
156
+ // {"python": "3.12", "req": {"numpy": "1.26"}, "env_nobuild": {"FOO": "123"}},
157
+ // // additional env if run on windows+conda
158
+ // {"platform": "win32", "environment_type": "conda", "python": "3.12", "req": {"libpython": ""}},
159
+ // ],
160
+
161
+ // The directory (relative to the current directory) that benchmarks are
162
+ // stored in. If not provided, defaults to "benchmarks"
163
+ // "benchmark_dir": "benchmarks",
164
+
165
+ // The directory (relative to the current directory) to cache the Python
166
+ // environments in. If not provided, defaults to "env"
167
+ "env_dir": "asv/env",
168
+
169
+ // The directory (relative to the current directory) that raw benchmark
170
+ // results are stored in. If not provided, defaults to "results".
171
+ "results_dir": "asv/results",
172
+
173
+ // The directory (relative to the current directory) that the html tree
174
+ // should be written to. If not provided, defaults to "html".
175
+ "html_dir": "asv/html",
176
+
177
+ // The number of characters to retain in the commit hashes.
178
+ // "hash_length": 8,
179
+
180
+ // `asv` will cache results of the recent builds in each
181
+ // environment, making them faster to install next time. This is
182
+ // the number of builds to keep, per environment.
183
+ "build_cache_size": 52
184
+
185
+ // The commits after which the regression search in `asv publish`
186
+ // should start looking for regressions. Dictionary whose keys are
187
+ // regexps matching to benchmark names, and values corresponding to
188
+ // the commit (exclusive) after which to start looking for
189
+ // regressions. The default is to start from the first commit
190
+ // with results. If the commit is `null`, regression detection is
191
+ // skipped for the matching benchmark.
192
+ //
193
+ // "regressions_first_commits": {
194
+ // "some_benchmark": "352cdf", // Consider regressions only after this commit
195
+ // "another_benchmark": null, // Skip regression detection altogether
196
+ // },
197
+
198
+ // The thresholds for relative change in results, after which `asv
199
+ // publish` starts reporting regressions. Dictionary of the same
200
+ // form as in ``regressions_first_commits``, with values
201
+ // indicating the thresholds. If multiple entries match, the
202
+ // maximum is taken. If no entry matches, the default is 5%.
203
+ //
204
+ // "regressions_thresholds": {
205
+ // "some_benchmark": 0.01, // Threshold of 1%
206
+ // "another_benchmark": 0.5, // Threshold of 50%
207
+ // },
208
+ }
MetPy/source/benchmarks/asv_run_script.sh ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+ #Run asv
3
+
4
+ # Generate artificial data file for benchmarks
5
+ python3 data_array_generate.py
6
+
7
+ #Set up asv machine
8
+ asv machine --yes
9
+
10
+ # Runs asv on the commits in the hash file but skips ones that already have results
11
+ asv run --skip-existing-successful HASHFILE:no_bot_merge_commits.txt
MetPy/source/benchmarks/benchmarks/__init__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
# Copyright (c) 2025 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Initialization file required for ASV to discover the benchmarks package."""
MetPy/source/benchmarks/benchmarks/apparent_temp_benchmarks.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Copyright (c) 2025 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Benchmark the apparent-temperature functions of metpy's calc module.

Covers apparent temperature, heat index, and windchill. Uses Airspeed Velocity
for benchmarking and an artificial dataset to ensure consistent and reliable
data for results.

"""

import os

import xarray as xr

import metpy.calc as mpcalc

18
class TimeSuite:
    """Benchmark apparent-temperature functions in time using ASV and xarray datasets.

    Uses ASV's benchmarking format to load in data and run benchmarks to measure time
    performance.

    NOTE(review): ASV passes the object returned by ``setup_cache`` as the first
    argument to ``setup`` and to every benchmark method, so the ``pressureslice`` /
    ``timeslice`` parameters below actually receive the full dataset and are unused;
    the methods operate on the slices prepared in ``setup``. Confirm against the ASV
    documentation before renaming anything (benchmark results are keyed by name).
    """

    # Benchmark version; bumping it invalidates previously stored results.
    # NOTE: I'm using CalVer https://calver.org/ YYYY.MM.DD
    version = '2025.07.02'

    def setup_cache(self):
        """Collect the sample dataset from the filepath and open it as an xarray Dataset.

        Runs once per benchmark environment; the result is cached by ASV.

        Returns
        -------
        ds
            Dataset with artificial meteorology data for testing
        """
        base_path = os.path.dirname(__file__)  # path to current file
        # Data file lives one directory above the benchmarks package.
        file_path = os.path.join(base_path, '..', 'data_array_compressed.nc')
        file_path = os.path.abspath(file_path)
        ds = xr.open_dataset(file_path)
        return ds

    def setup(self, ds):
        """Set up the appropriate slices from the sample dataset for testing.

        Parameters
        ----------
        ds : dataset
            The dataset made in setup_cache which contains the testing data
        """
        # 2-D surface slice (single pressure level, single time).
        self.pressureslice = ds.isel(pressure=0, time=0)
        # 3-D cube (single time, all pressure levels).
        self.timeslice = ds.isel(time=0)

    def time_apparent_temperature(self, pressureslice):
        """Benchmark calculating apparent temperature on a 2d grid."""
        mpcalc.apparent_temperature(self.pressureslice.temperature,
                                    self.pressureslice.relative_humidity,
                                    self.pressureslice.windspeed)

    def time_heat_index(self, timeslice):
        """Benchmark calculating heat index on a 3d cube."""
        mpcalc.heat_index(self.timeslice.temperature, self.timeslice.relative_humidity)

    def time_windchill(self, timeslice):
        """Benchmark calculating windchill on a 3d cube."""
        mpcalc.windchill(self.timeslice.temperature, self.timeslice.windspeed)
MetPy/source/benchmarks/benchmarks/bound_layer_turbulence_benchmarks.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Copyright (c) 2025 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Benchmark the boundary-layer / turbulence functions of metpy's calc module.

Covers Brunt-Vaisala frequency/period, gradient Richardson number, and turbulent
kinetic energy. Uses Airspeed Velocity for benchmarking and an artificial dataset
to ensure consistent and reliable data for results.

"""

import os

import xarray as xr

import metpy.calc as mpcalc
from metpy.units import units

19
class TimeSuite:
    """Benchmark boundary-layer turbulence functions in time using ASV and xarray datasets.

    Uses ASV's benchmarking format to load in data and run benchmarks to measure time
    performance.

    NOTE(review): ASV passes the object returned by ``setup_cache`` as the first
    argument to ``setup`` and to every benchmark method, so the ``timeslice`` / ``ds``
    parameters below receive the full dataset; most methods use the slice prepared in
    ``setup`` while ``time_tke`` intentionally(?) operates on the whole dataset --
    confirm that asymmetry is deliberate.
    """

    # Benchmark version; bumping it invalidates previously stored results.
    # NOTE: I'm using CalVer https://calver.org/ YYYY.MM.DD
    version = '2025.07.02'

    def setup_cache(self):
        """Collect the sample dataset from the filepath and open it as an xarray Dataset.

        Runs once per benchmark environment; the result is cached by ASV.

        Returns
        -------
        ds
            Dataset with artificial meteorology data for testing
        """
        base_path = os.path.dirname(__file__)  # path to current file
        # Data file lives one directory above the benchmarks package.
        file_path = os.path.join(base_path, '..', 'data_array_compressed.nc')
        file_path = os.path.abspath(file_path)
        ds = xr.open_dataset(file_path)
        return ds

    def setup(self, ds):
        """Set up the appropriate slices from the sample dataset for testing.

        Parameters
        ----------
        ds : dataset
            The dataset made in setup_cache which contains the testing data
        """
        # 3-D cube (single time, all pressure levels).
        self.timeslice = ds.isel(time=0)

    def time_brunt_vaisala_frequency(self, timeslice):
        """Benchmark Brunt Vaisala frequency calculation on a cube."""
        mpcalc.brunt_vaisala_frequency(self.timeslice.height, self.timeslice.theta)

    def time_gradient_richardson_number(self, timeslice):
        """Benchmark Gradient Richardson Number on a cube."""
        mpcalc.gradient_richardson_number(self.timeslice.height, self.timeslice.theta,
                                          self.timeslice.uwind, self.timeslice.vwind)

    def time_tke(self, ds):
        """Benchmark turbulent kinetic energy calculation on a cube.

        Operates on raw values of the full dataset with units attached manually.
        """
        mpcalc.tke(ds.uwind.values * units('m/s'), ds.vwind.values * units('m/s'),
                   ds.wwind.values * units('m/s'))

    def time_brunt_vaisala_period(self, timeslice):
        """Benchmark Brunt Vaisala period calculation on a cube."""
        mpcalc.brunt_vaisala_period(self.timeslice.height, self.timeslice.theta)
MetPy/source/benchmarks/benchmarks/dry_thermo_benchmarks.py ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Copyright (c) 2025 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Benchmark the functions in the dry thermo section of metpy's calc module.

Uses Airspeed Velocity for benchmarking and an artificial dataset to ensure
consistent and reliable data for results.

"""

import os

import xarray as xr

import metpy.calc as mpcalc
from metpy.units import units

19
class TimeSuite:
    """Benchmark dry thermo functions in time using Airspeed Velocity and xarray datasets.

    Uses ASV's benchmarking format to load in data and run benchmarks to measure time
    performance.

    NOTE(review): ASV passes the object returned by ``setup_cache`` as the first
    argument to ``setup`` and to every benchmark method, so the ``pressureslice`` /
    ``timeslice`` / ``profileslice`` parameters below actually receive the full
    dataset and are unused; the methods operate on the slices prepared in ``setup``.
    """

    # Benchmark version; bumping it invalidates previously stored results.
    # NOTE: I'm using CalVer https://calver.org/ YYYY.MM.DD
    version = '2025.07.07'

    def setup_cache(self):
        """Collect the sample dataset from the filepath and open it as an xarray Dataset.

        Runs once per benchmark environment; the result is cached by ASV.

        Returns
        -------
        ds
            Dataset with artificial meteorology data for testing
        """
        base_path = os.path.dirname(__file__)  # path to current file
        # Data file lives one directory above the benchmarks package.
        file_path = os.path.join(base_path, '..', 'data_array_compressed.nc')
        file_path = os.path.abspath(file_path)
        ds = xr.open_dataset(file_path)
        return ds

    def setup(self, ds):
        """Set up the appropriate slices from the sample dataset for testing.

        Parameters
        ----------
        ds : dataset
            The dataset made in setup_cache which contains the testing data
        """
        # 2-D surface slice (single pressure level, single time).
        self.pressureslice = ds.isel(pressure=0, time=0)
        # 3-D cube (single time, all pressure levels).
        self.timeslice = ds.isel(time=0)
        # Single vertical profile (one time, one grid point).
        self.profileslice = ds.isel(time=0, lat=0, lon=0)

    def time_density(self, pressureslice):
        """Benchmark density calculation on a 2d surface."""
        mpcalc.density(self.pressureslice.pressure, self.pressureslice.temperature,
                       self.pressureslice.mixing_ratio)

    def time_height_to_geopotential(self, timeslice):
        """Benchmark the height to geopotential calculation on a 3d cube."""
        mpcalc.height_to_geopotential(self.timeslice.height)

    def time_potential_temperature(self, timeslice):
        """Benchmark the potential temperature calculation on a 3d cube."""
        mpcalc.potential_temperature(self.timeslice.pressure, self.timeslice.temperature)

    def time_static_stability(self, timeslice):
        """Benchmark static stability calculation on a 3d cube."""
        mpcalc.static_stability(self.timeslice.pressure, self.timeslice.temperature)

    def time_thickness_hydrostatic(self, timeslice):
        """Benchmark hydrostatic thickness calculation on a 3d cube."""
        mpcalc.thickness_hydrostatic(self.timeslice.pressure, self.timeslice.temperature,
                                     self.timeslice.mixing_ratio)

    def time_dry_lapse(self, timeslice):
        """Benchmark the dry lapse calculation on a 3d cube."""
        mpcalc.dry_lapse(self.timeslice.pressure, self.timeslice.temperature)

    def time_sigma_to_pressure(self, timeslice):
        """Benchmark the sigma to pressure calculation on a 3d cube.

        Uses the first and last (50th) pressure levels as the surface and
        model-top pressures.
        """
        mpcalc.sigma_to_pressure(self.timeslice.sigma, self.timeslice.pressure[0],
                                 self.timeslice.pressure[49])

    def time_geopotential_to_height(self, timeslice):
        """Benchmark the geopotential to height calculation on a 3d cube."""
        mpcalc.geopotential_to_height(self.timeslice.geopotential)

    def time_add_pressure_to_height(self, timeslice):
        """Benchmark adding pressure to height on a 3d cube."""
        mpcalc.add_pressure_to_height(self.timeslice.height, self.timeslice.pressure)

    def time_add_height_to_pressure(self, timeslice):
        """Benchmark adding height to pressure on a 3d cube.

        Operates on raw values with units attached manually.
        """
        mpcalc.add_height_to_pressure(self.timeslice.pressure.values * units('hPa'),
                                      self.timeslice.height.values * units('km'))

    def time_temperature_from_potential_temperature(self, timeslice):
        """Benchmark calculating temperature from potential temperature on a 3d cube."""
        mpcalc.temperature_from_potential_temperature(self.timeslice.pressure,
                                                      self.timeslice.theta)

    def time_mean_pressure_weighted(self, profileslice):
        """Benchmark calculating weighted mean of pressure with temp on one profile."""
        mpcalc.mean_pressure_weighted(self.profileslice.pressure,
                                      self.profileslice.temperature)

    def time_weighted_continuous_average(self, profileslice):
        """Benchmark calculating weighted continuous average on one profile."""
        mpcalc.weighted_continuous_average(self.profileslice.pressure,
                                           self.profileslice.temperature)

    def time_dry_static_energy(self, timeslice):
        """Benchmark dry static energy calculation on a 3d cube."""
        mpcalc.dry_static_energy(self.timeslice.height, self.timeslice.temperature)
MetPy/source/benchmarks/benchmarks/dyn_kin_benchmarks.py ADDED
@@ -0,0 +1,159 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Copyright (c) 2025 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Benchmark the dynamics and kinematics functions of metpy's calc module.

Covers vorticity, deformation, advection, geostrophic/ageostrophic winds, and
related quantities. Uses Airspeed Velocity for benchmarking and an artificial
dataset to ensure consistent and reliable data for results.

"""

import os

import xarray as xr

import metpy.calc as mpcalc
import metpy.interpolate as mpinter

19
class TimeSuite:
    """Benchmark dynamics/kinematics functions in time using ASV and xarray datasets.

    Uses ASV's benchmarking format to load in data and run benchmarks to measure time
    performance.

    NOTE(review): ASV passes the object returned by ``setup_cache`` as the first
    argument to ``setup`` and to every benchmark method, so the ``pressureslice`` /
    ``timeslice`` / ``cross`` parameters below actually receive the full dataset and
    are unused; the methods operate on the slices prepared in ``setup``.
    """

    # Benchmark version; bumping it invalidates previously stored results.
    # NOTE: I'm using CalVer https://calver.org/ YYYY.MM.DD
    version = '2025.07.03'

    def setup_cache(self):
        """Collect the sample dataset from the filepath and open it as an xarray Dataset.

        Runs once per benchmark environment; the result is cached by ASV.
        parse_cf() attaches CF metadata (grid mapping/coordinates) needed by the
        grid-aware calculations below.

        Returns
        -------
        ds
            Dataset with artificial meteorology data for testing
        """
        base_path = os.path.dirname(__file__)  # path to current file
        # Data file lives one directory above the benchmarks package.
        file_path = os.path.join(base_path, '..', 'data_array_compressed.nc')
        file_path = os.path.abspath(file_path)
        ds = xr.open_dataset(file_path)
        ds = ds.metpy.parse_cf()
        return ds

    def setup(self, ds):
        """Set up the appropriate slices from the sample dataset for testing.

        Parameters
        ----------
        ds : dataset
            The dataset made in setup_cache which contains the testing data
        """
        # 2-D surface slice (single pressure level, single time).
        self.pressureslice = ds.isel(pressure=0, time=0)
        # 3-D cube (single time, all pressure levels).
        self.timeslice = ds.isel(time=0)
        # Single vertical profile (one time, one grid point).
        self.profileslice = ds.isel(time=0, lat=0, lon=0)
        # Vertical cross-section between two (lat, lon) endpoints, used by the
        # cross-section-based benchmarks.
        start = (30., 260.)
        end = (40., 270.)
        self.cross = mpinter.cross_section(self.timeslice, start, end).set_coords(('lat', 'lon'
                                                                                   ))

    def time_absolute_vorticity(self, pressureslice):
        """Benchmark absolute vorticity calculation on a 2d surface."""
        mpcalc.absolute_vorticity(self.pressureslice.uwind, self.pressureslice.vwind)

    def time_advection(self, timeslice):
        """Benchmark the advection calculation of t on a 3d cube."""
        mpcalc.advection(self.timeslice.temperature, self.timeslice.uwind,
                         self.timeslice.vwind)

    def time_ageostrophic_wind(self, pressureslice):
        """Benchmark ageostrophic wind calculation on a 2d surface."""
        mpcalc.ageostrophic_wind(self.pressureslice.height, self.pressureslice.uwind,
                                 self.pressureslice.vwind)

    def time_frontogenesis(self, pressureslice):
        """Benchmark the calculation of frontogenesis of a 2d field."""
        mpcalc.frontogenesis(self.pressureslice.theta, self.pressureslice.uwind,
                             self.pressureslice.vwind)

    def time_potential_vorticity_barotropic(self, timeslice):
        """Benchmark the barotropic potential vorticity calculation on a cube."""
        mpcalc.potential_vorticity_barotropic(self.timeslice.height, self.timeslice.uwind,
                                              self.timeslice.vwind)

    def time_q_vector(self, pressureslice):
        """Benchmark q vector calculation on a 2d slice."""
        mpcalc.q_vector(self.pressureslice.uwind, self.pressureslice.vwind,
                        self.pressureslice.temperature, self.pressureslice.pressure)

    def time_total_deformation(self, pressureslice):
        """Benchmark total deformation calculation on a 2d slice."""
        mpcalc.total_deformation(self.pressureslice.uwind, self.pressureslice.vwind)

    def time_vorticity(self, pressureslice):
        """Benchmark vorticity calculation on a 2d slice."""
        mpcalc.vorticity(self.pressureslice.uwind, self.pressureslice.vwind)

    def time_shear_vorticity(self, pressureslice):
        """Benchmark shear vorticity on a 2d slice."""
        mpcalc.shear_vorticity(self.pressureslice.uwind, self.pressureslice.vwind)

    def time_absolute_momentum(self, cross):
        """Benchmark absolute momentum calculation on a cross-section."""
        mpcalc.absolute_momentum(self.cross.uwind, self.cross.vwind)

    def time_potential_vorticity_baroclinic(self, timeslice):
        """Benchmark potential vorticity baroclinic on a 3d cube."""
        mpcalc.potential_vorticity_baroclinic(self.timeslice.theta, self.timeslice.pressure,
                                              self.timeslice.uwind, self.timeslice.vwind)

    # NOTE(review): method name has a typo ("inertal" for "inertial"); renaming
    # would change the recorded benchmark name in ASV results, so it is kept.
    def time_inertal_advective_wind(self, timeslice):
        """Benchmark inertial advective wind calculation on a 3d cube."""
        mpcalc.inertial_advective_wind(self.timeslice.uwind, self.timeslice.vwind,
                                       self.timeslice.uwind, self.timeslice.vwind)

    def time_curvature_vorticity(self, timeslice):
        """Benchmark the curvature vorticity calculation on a 3d cube."""
        mpcalc.curvature_vorticity(self.timeslice.uwind, self.timeslice.vwind)

    def time_montgomery_streamfunction(self, pressureslice):
        """Benchmark the montgomery streamfunction calculation on a 2d grid."""
        mpcalc.montgomery_streamfunction(self.pressureslice.height,
                                         self.pressureslice.temperature)

    def time_wind_direction(self, timeslice):
        """Benchmark the wind direction calculation on a 3d cube."""
        mpcalc.wind_direction(self.timeslice.uwind, self.timeslice.vwind)

    def time_wind_components(self, timeslice):
        """Benchmark the wind components calculation on a 3d cube."""
        mpcalc.wind_components(self.timeslice.windspeed, self.timeslice.winddir)

    def time_divergence(self, timeslice):
        """Benchmark divergence on a 3d cube."""
        mpcalc.divergence(self.timeslice.uwind, self.timeslice.vwind)

    def time_stretching_deformation(self, timeslice):
        """Benchmark stretching deformation on a 3d cube."""
        mpcalc.stretching_deformation(self.timeslice.uwind, self.timeslice.vwind)

    def time_shearing_deformation(self, timeslice):
        """Benchmark shearing deformation on a 3d cube."""
        mpcalc.shearing_deformation(self.timeslice.uwind, self.timeslice.vwind)

    def time_geostrophic_wind(self, timeslice):
        """Benchmark the geostrophic wind calculation on a 3d cube."""
        mpcalc.geostrophic_wind(self.timeslice.height, latitude=self.timeslice.lat)

    def time_coriolis_parameter(self, timeslice):
        """Benchmark coriolis parameter calculation on a 3d cube."""
        mpcalc.coriolis_parameter(self.timeslice.lat)

    def time_wind_speed(self, timeslice):
        """Benchmark wind speed calculation on a 3d cube."""
        mpcalc.wind_speed(self.timeslice.uwind, self.timeslice.vwind)

    def time_exner_function(self, timeslice):
        """Benchmark exner function calculation on a cube."""
        mpcalc.exner_function(self.timeslice.pressure)
MetPy/source/benchmarks/benchmarks/math_fctn_benchmarks.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Copyright (c) 2025 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Benchmark the mathematical functions of metpy's calc module.

Covers gradients, laplacians, vector derivatives, and cross-section components.
Uses Airspeed Velocity for benchmarking and an artificial dataset to ensure
consistent and reliable data for results.

"""

import os

import xarray as xr

import metpy.calc as mpcalc
import metpy.interpolate as mpinter

19
class TimeSuite:
    """Benchmark mathematical functions in time using Airspeed Velocity and xarray datasets.

    Uses ASV's benchmarking format to load in data and run benchmarks to measure time
    performance.

    NOTE(review): ASV passes the object returned by ``setup_cache`` as the first
    argument to ``setup`` and to every benchmark method, so the ``pressureslice`` /
    ``timeslice`` / ``cross`` parameters below actually receive the full dataset and
    are unused; the methods operate on the slices prepared in ``setup``.
    """

    # Benchmark version; bumping it invalidates previously stored results.
    # NOTE: I'm using CalVer https://calver.org/ YYYY.MM.DD
    version = '2025.07.03'

    def setup_cache(self):
        """Collect the sample dataset from the filepath and open it as an xarray Dataset.

        Runs once per benchmark environment; the result is cached by ASV.
        parse_cf() attaches CF metadata (grid mapping/coordinates) needed by the
        grid-aware calculations below.

        Returns
        -------
        ds
            Dataset with artificial meteorology data for testing
        """
        base_path = os.path.dirname(__file__)  # path to current file
        # Data file lives one directory above the benchmarks package.
        file_path = os.path.join(base_path, '..', 'data_array_compressed.nc')
        file_path = os.path.abspath(file_path)
        ds = xr.open_dataset(file_path)
        ds = ds.metpy.parse_cf()
        return ds

    def setup(self, ds):
        """Set up the appropriate slices from the sample dataset for testing.

        Parameters
        ----------
        ds : dataset
            The dataset made in setup_cache which contains the testing data
        """
        # 2-D surface slice (single pressure level, single time).
        self.pressureslice = ds.isel(pressure=0, time=0)
        # 3-D cube (single time, all pressure levels).
        self.timeslice = ds.isel(time=0)
        # Vertical cross-section between two (lat, lon) endpoints, used by the
        # cross-section-based benchmarks.
        start = (30., 260.)
        end = (40., 270.)
        self.cross = mpinter.cross_section(self.timeslice,
                                           start, end).set_coords(('lat', 'lon'))

    def time_geospatial_gradient(self, pressureslice):
        """Benchmark calculating the geospatial gradient of temp on a 2d array."""
        mpcalc.geospatial_gradient(self.pressureslice.temperature)

    def time_geospatial_laplacian(self, pressureslice):
        """Benchmark calculating the geospatial laplacian of temp on a 2d array."""
        mpcalc.geospatial_laplacian(self.pressureslice.temperature)

    def time_gradient(self, timeslice):
        """Benchmark calculating the gradient of temp on a 3d cube."""
        mpcalc.gradient(self.timeslice.temperature)

    def time_vector_derivative(self, pressureslice):
        """Benchmark calculating the vector derivative of wind on a 2d slice."""
        mpcalc.vector_derivative(self.pressureslice.uwind, self.pressureslice.vwind)

    def time_tangential_component(self, cross):
        """Benchmark calculation of the tangential component of wind on a slice."""
        mpcalc.tangential_component(self.cross.uwind, self.cross.vwind)

    def time_cross_section_components(self, cross):
        """Benchmark the cross section components of a wind grid."""
        mpcalc.cross_section_components(self.cross.uwind, self.cross.vwind)

    def time_normal_component(self, cross):
        """Benchmark the calculation of normal components of wind on a slice."""
        mpcalc.normal_component(self.cross.uwind, self.cross.vwind)
MetPy/source/benchmarks/benchmarks/moist_thermo_benchmarks.py ADDED
@@ -0,0 +1,220 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2025 MetPy Developers.
2
+ # Distributed under the terms of the BSD 3-Clause License.
3
+ # SPDX-License-Identifier: BSD-3-Clause
4
+ """Benchmark the functions in the moist thermo section of metpy's calc module.
5
+
6
+ Uses Airspeed Velocity for benchmarking and uses artificial dataset to ensure consistent and
7
+ reliable data for results.
8
+
9
+ """
10
+
11
+ import os
12
+
13
+ import xarray as xr
14
+
15
+ import metpy.calc as mpcalc
16
+ from metpy.units import units
17
+
18
+
19
+ class TimeSuite:
20
+ """Benchmark moist thermo functions in time using Airspeed Velocity and xarray datasets.
21
+
22
+ Uses ASV's benchmarking format to load in data and run benchmarks to measure time
23
+ performance
24
+
25
+ """
26
+
27
+ # NOTE: I'm using CalVer https://calver.org/ YYYY.MM.DD
28
+ version = '2025.07.02'
29
+
30
+ def setup_cache(self):
31
+ """Collect the sample dataset from the filepath and opens it as an xarray.
32
+
33
+ Returns
34
+ -------
35
+ ds
36
+ Dataset with artificial meteorology data for testing
37
+ """
38
+ base_path = os.path.dirname(__file__) # path to current file
39
+ file_path = os.path.join(base_path, '..', 'data_array_compressed.nc')
40
+ file_path = os.path.abspath(file_path)
41
+ ds = xr.open_dataset(file_path)
42
+ return ds
43
+
44
+ def setup(self, ds):
45
+ """Set up the appropriate slices from the sample dataset for testing.
46
+
47
+ Parameters
48
+ ----------
49
+ ds : dataset
50
+ The dataset made in setup_cache which contains the testing data
51
+ """
52
+ self.pressureslice = ds.isel(pressure=0, time=0)
53
+ self.timeslice = ds.isel(time=0)
54
+ self.upperslice = ds.isel(pressure=49, time=0)
55
+ self.profileslice = ds.isel(time=0, lat=25, lon=25)
56
+
57
+ def time_virtual_temperature(self, timeslice):
58
+ """Benchmark virtual temperature on a 3d cube."""
59
+ mpcalc.virtual_temperature(self.timeslice.temperature, self.timeslice.mixing_ratio)
60
+
61
+ def time_dewpoint(self, timeslice):
62
+ """Benchmarking dewpoint from vapor pressure on a 3d cube."""
63
+ mpcalc.dewpoint(self.timeslice.vapor_pressure)
64
+
65
+ def time_rh_from_mixing_ratio(self, timeslice):
66
+ """Benchmarking relative humidity from mixing ratio on a 3d cube."""
67
+ mpcalc.relative_humidity_from_mixing_ratio(self.timeslice.pressure,
68
+ self.timeslice.temperature,
69
+ self.timeslice.mixing_ratio)
70
+
71
+ def time_dewpoint_from_rh(self, timeslice):
72
+ """Benchmarking dewpoint from calculated on a 3d cube."""
73
+ mpcalc.dewpoint_from_relative_humidity(self.timeslice.temperature,
74
+ self.timeslice.relative_humidity)
75
+
76
+ def time_precipitable_water(self, timeslice):
77
+ """Benchmarking precipitable water calculation for one column."""
78
+ mpcalc.precipitable_water(self.timeslice.pressure, self.timeslice.dewpoint[0][0])
79
+
80
+ def time_wet_bulb_temperature(self, pressureslice):
81
+ """Benchmarking wet bulb temperature calculation on on a slice."""
82
+ mpcalc.wet_bulb_temperature(self.pressureslice.pressure,
83
+ self.pressureslice.temperature,
84
+ self.pressureslice.dewpoint)
85
+
86
+ def time_scale_height(self, pressureslice):
87
+ """Benchmarking the calculation for the scale height of a layer for 2 surfaces."""
88
+ mpcalc.scale_height(self.upperslice.temperature, self.pressureslice.temperature)
89
+
90
+ def time_moist_lapse(self, profileslice):
91
+ """Benchmarking the calculation for the moist lapse rate for one profile."""
92
+ mpcalc.moist_lapse(self.profileslice.pressure.values * units('hPa'),
93
+ self.profileslice.temperature[0].values * units('K'))
94
+
95
+ def time_saturation_vapor_pressure(self, timeslice):
96
+ """Benchmarking the saturation vapor pressure calculation for a 3d cube."""
97
+ mpcalc.saturation_vapor_pressure(self.timeslice.temperature)
98
+
99
+ def time_water_latent_heat_vaporization(self, timeslice):
100
+ """Benchmarking the vaporization latent heat calculation on a 3d cube."""
101
+ mpcalc.water_latent_heat_vaporization(self.timeslice.temperature)
102
+
103
+ def time_water_latent_heat_sublimation(self, timeslice):
104
+ """Benchmarking the sublimation latent heat calculation on a 3d cube."""
105
+ mpcalc.water_latent_heat_sublimation(self.timeslice.temperature)
106
+
107
+ def time_water_latent_heat_melting(self, timeslice):
108
+ """Benchmarking the melting latent heat calculation on a 3d cube."""
109
+ mpcalc.water_latent_heat_melting(self.timeslice.temperature)
110
+
111
+ def time_specific_humidity_from_dewpoint(self, timeslice):
112
+ """Benchmarking specific humidity from dewpoint calculation on a 3d cube."""
113
+ mpcalc.specific_humidity_from_dewpoint(self.timeslice.pressure,
114
+ self.timeslice.temperature)
115
+
116
+ def time_relative_humidity_from_dewpoint(self, timeslice):
117
+ """Benchmarking relative humidity from dewpoint calculation on a 3d cube."""
118
+ mpcalc.relative_humidity_from_dewpoint(self.timeslice.temperature,
119
+ self.timeslice.dewpoint)
120
+
121
+ def time_moist_static_energy(self, timeslice):
122
+ """Benchmarking moist static energy calculation on a 3d cube."""
123
+ mpcalc.moist_static_energy(self.timeslice.height, self.timeslice.temperature,
124
+ self.timeslice.specific_humidity)
125
+
126
+ def time_dewpoint_from_specific_humidity(self, timeslice):
127
+ """Benchmarking dewpoint from specific humidity calculation on a 3d cube."""
128
+ mpcalc.dewpoint_from_specific_humidity(self.timeslice.pressure,
129
+ self.timeslice.temperature,
130
+ self.timeslice.specific_humidity)
131
+
132
+ def time_moist_air_specific_heat_pressure(self, timeslice):
133
+ """Benchmarking moist air specific heat pressure calculation on a 3d cube."""
134
+ mpcalc.moist_air_specific_heat_pressure(self.timeslice.specific_humidity)
135
+
136
+ def time_moist_air_poisson_exponent(self, timeslice):
137
+ """Benchmarking moist air poisson exponent calculation on a cube."""
138
+ mpcalc.moist_air_poisson_exponent(self.timeslice.specific_humidity)
139
+
140
+ def time_relative_humidity_wet_psychrometric(self, timeslice):
141
+ """Benchmarking the relative humidity from psychometric calculation on a cube."""
142
+ mpcalc.relative_humidity_wet_psychrometric(self.timeslice.pressure,
143
+ self.timeslice.temperature,
144
+ self.timeslice.wet_bulb_temperature)
145
+
146
+ def time_thickness_hydrostatic_from_relative_humidity(self, profileslice):
147
+ """Benchmarking thickness calculation from relative humidity on one profile."""
148
+ mpcalc.thickness_hydrostatic_from_relative_humidity(self.profileslice.pressure,
149
+ self.profileslice.temperature,
150
+ self.profileslice.relative_humidity
151
+ )
152
+
153
+ def time_relative_humidity_from_specific_humidity(self, timeslice):
154
+ """Benchmarking relative humidity from specific humidity calculation on a 3d cube."""
155
+ mpcalc.relative_humidity_from_specific_humidity(self.timeslice.pressure,
156
+ self.timeslice.temperature,
157
+ self.timeslice.specific_humidity)
158
+
159
+ def time_wet_bulb_potential_temperature(self, timeslice):
160
+ """Benchmarking the wet bulb potential temperature calculation on a 3d cube."""
161
+ mpcalc.wet_bulb_potential_temperature(self.timeslice.pressure,
162
+ self.timeslice.temperature,
163
+ self.timeslice.dewpoint)
164
+
165
+ def time_vertical_velocity_pressure(self, timeslice):
166
+ """Benchmarking vertical velocity wrt pressure calculation on a 3d cube."""
167
+ mpcalc.vertical_velocity_pressure(self.timeslice.wwind, self.timeslice.pressure,
168
+ self.timeslice.temperature,
169
+ self.timeslice.mixing_ratio)
170
+
171
+ def time_vertical_velocity(self, timeslice):
172
+ """Benchmarking vertical velocity calculation on a 3d cube."""
173
+ mpcalc.vertical_velocity(self.timeslice.omega, self.timeslice.pressure,
174
+ self.timeslice.temperature,
175
+ self.timeslice.mixing_ratio)
176
+
177
+ def time_saturation_equivalent_potential_temperature(self, timeslice):
178
+ """Benchmarking saturation equivalent potential temperature on 3d cube."""
179
+ mpcalc.saturation_equivalent_potential_temperature(self.timeslice.pressure,
180
+ self.timeslice.temperature)
181
+
182
+ def time_virtual_potential_temperature(self, timeslice):
183
+ """Benchmarking virtual potential temperature calculation on a 3d cube."""
184
+ mpcalc.virtual_potential_temperature(self.timeslice.pressure,
185
+ self.timeslice.temperature,
186
+ self.timeslice.mixing_ratio)
187
+
188
+ def time_psychrometric_vapor_pressure_wet(self, timeslice):
189
+ """Benchmarking psychrometric vapor pressure calculation on a 3d cube."""
190
+ mpcalc.psychrometric_vapor_pressure_wet(self.timeslice.pressure,
191
+ self.timeslice.temperature,
192
+ self.timeslice.wet_bulb_temperature)
193
+
194
+ def time_mixing_ratio_from_relative_humidity(self, timeslice):
195
+ """Benchmarking mixing ratio from relative humidity calculation on a 3d cube."""
196
+ mpcalc.mixing_ratio_from_relative_humidity(self.timeslice.pressure,
197
+ self.timeslice.temperature,
198
+ self.timeslice.relative_humidity)
199
+
200
+ def time_mixing_ratio_from_specific_humidity(self, timeslice):
201
+ """Benchmarking calculating mixing rato from specific humidity on a 3d cube."""
202
+ mpcalc.mixing_ratio_from_specific_humidity(self.timeslice.specific_humidity)
203
+
204
+ def time_relative_humidity_from_mixing_ratio(self, timeslice):
205
+ """Benchmarking relative humidity from mixing ratio calculation on a 3d cube."""
206
+ mpcalc.relative_humidity_from_mixing_ratio(self.timeslice.pressure,
207
+ self.timeslice.temperature,
208
+ self.timeslice.mixing_ratio)
209
+
210
+ def time_equivalent_potential_temperature(self, timeslice):
211
+ """Benchmarking equivalent potential temperature calculation on 3d cube."""
212
+ mpcalc.equivalent_potential_temperature(self.timeslice.pressure,
213
+ self.timeslice.temperature,
214
+ self.timeslice.dewpoint)
215
+
216
+ def time_virtual_temperature_from_dewpoint(self, timeslice):
217
+ """Benchmarking virtual temperature from dewpoint calculation on 3d cube."""
218
+ mpcalc.virtual_temperature_from_dewpoint(self.timeslice.pressure,
219
+ self.timeslice.temperature,
220
+ self.timeslice.dewpoint)
MetPy/source/benchmarks/benchmarks/other_benchmarks.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2025 MetPy Developers.
2
+ # Distributed under the terms of the BSD 3-Clause License.
3
+ # SPDX-License-Identifier: BSD-3-Clause
4
+ """Benchmark the functions in the moist thermo section of metpy's calc module.
5
+
6
+ Uses Airspeed Velocity for benchmarking and uses artificial dataset to ensure consistent and
7
+ reliable data for results.
8
+
9
+ """
10
+
11
+ import os
12
+
13
+ import xarray as xr
14
+
15
+ import metpy.calc as mpcalc
16
+ from metpy.units import units
17
+
18
+
19
class TimeSuite:
    """Benchmark assorted calc utility functions in time using Airspeed Velocity.

    Uses ASV's benchmarking format to load in data and run benchmarks to measure time
    performance. Covers intersection/peak finding, perturbations and isentropic
    interpolation (the class was templated from the moist-thermo suite).
    """

    # NOTE: I'm using CalVer https://calver.org/ YYYY.MM.DD
    version = '2025.07.02'

    def setup_cache(self):
        """Collect the sample dataset from the filepath and opens it as an xarray.

        Returns
        -------
        ds
            Dataset with artificial meteorology data for testing
        """
        base_path = os.path.dirname(__file__)  # path to current file
        # Dataset lives one level above the benchmarks package.
        file_path = os.path.join(base_path, '..', 'data_array_compressed.nc')
        file_path = os.path.abspath(file_path)
        ds = xr.open_dataset(file_path)
        return ds

    def setup(self, ds):
        """Set up the appropriate slices from the sample dataset for testing.

        Parameters
        ----------
        ds : dataset
            The dataset made in setup_cache which contains the testing data
        """
        self.ds = ds
        self.pressureslice = ds.isel(pressure=0, time=0)
        self.timeslice = ds.isel(time=0)
        self.lineslice = ds.isel(pressure=0, time=0, lat=0)
        # NOTE(review): profileslice is not used by any benchmark in this suite.
        self.profileslice = ds.isel(time=0, lat=0, lon=0)

    def time_find_intersections(self, lineslice):
        """Benchmarking finding intersections calculation."""
        # Intersections of temperature and dewpoint traces along a line of longitudes.
        mpcalc.find_intersections(self.lineslice.lon, self.lineslice.temperature,
                                  self.lineslice.dewpoint)

    def time_find_peaks(self, pressureslice):
        """Benchmarking finding peaks of 2d dewpoint slice."""
        mpcalc.find_peaks(self.pressureslice.dewpoint)

    def time_get_perturbation(self, ds):
        """Benchmarking getting the perturbation of a time series."""
        # Operates on the full dataset (all times), not a slice.
        mpcalc.get_perturbation(self.ds.temperature)

    def time_peak_persistence(self, pressureslice):
        """Benchmarking calculating persistence of maxima points on a 2d slice."""
        mpcalc.peak_persistence(self.pressureslice.dewpoint)

    def time_isentropic_interpolation_as_dataset(self, timeslice):
        """Benchmarking the isentropic interpolation as dataset calculation on a 3d cube."""
        mpcalc.isentropic_interpolation_as_dataset([265.] * units.kelvin,
                                                   self.timeslice.temperature)

    def time_isentropic_interpolation(self, timeslice):
        """Benchmarking the isentropic interpolation calculation on a 3d cube."""
        mpcalc.isentropic_interpolation([265.] * units.kelvin, self.timeslice.pressure,
                                        self.timeslice.temperature)
MetPy/source/benchmarks/benchmarks/smoothing_benchmarks.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2025 MetPy Developers.
2
+ # Distributed under the terms of the BSD 3-Clause License.
3
+ # SPDX-License-Identifier: BSD-3-Clause
4
+ """Benchmark the functions in the moist thermo section of metpy's calc module.
5
+
6
+ Uses Airspeed Velocity for benchmarking and uses artificial dataset to ensure consistent and
7
+ reliable data for results.
8
+
9
+ """
10
+
11
+ import os
12
+
13
+ import numpy as np
14
+ import xarray as xr
15
+
16
+ import metpy.calc as mpcalc
17
+
18
+
19
class TimeSuite:
    """Benchmark smoothing functions in time using Airspeed Velocity and xarray datasets.

    Uses ASV's benchmarking format to load in data and run benchmarks to measure time
    performance. (The class was templated from the moist-thermo suite.)
    """

    # NOTE: I'm using CalVer https://calver.org/ YYYY.MM.DD
    version = '2025.07.02'

    def setup_cache(self):
        """Collect the sample dataset from the filepath and opens it as an xarray.

        Returns
        -------
        ds
            Dataset with artificial meteorology data for testing
        """
        base_path = os.path.dirname(__file__)  # path to current file
        # Dataset lives one level above the benchmarks package.
        file_path = os.path.join(base_path, '..', 'data_array_compressed.nc')
        file_path = os.path.abspath(file_path)
        ds = xr.open_dataset(file_path)
        return ds

    def setup(self, ds):
        """Set up the appropriate slices from the sample dataset for testing.

        Parameters
        ----------
        ds : dataset
            The dataset made in setup_cache which contains the testing data
        """
        self.pressureslice = ds.isel(pressure=0, time=0)
        # NOTE(review): timeslice is not used by any benchmark in this suite.
        self.timeslice = ds.isel(time=0)

    def time_smooth_gaussian(self, pressureslice):
        """Benchmarking the gaussian smoothing of a 2d grid."""
        mpcalc.smooth_gaussian(self.pressureslice.relative_humidity, 5)

    def time_smooth_window(self, pressureslice):
        """Benchmarking the window smoothing of a 2d grid."""
        # 5x5 identity (diagonal) window.
        mpcalc.smooth_window(self.pressureslice.relative_humidity, np.diag(np.ones(5)))

    def time_smooth_rectangular(self, pressureslice):
        """Benchmarking the rectangular smoothing of a 2d grid."""
        mpcalc.smooth_rectangular(self.pressureslice.relative_humidity, (3, 7))

    def time_smooth_circular(self, pressureslice):
        """Benchmarking the circular smoothing of a 2d grid."""
        mpcalc.smooth_circular(self.pressureslice.relative_humidity, 2)

    def time_smooth_n_point(self, pressureslice):
        """Benchmarking the 5 point smoothing of a 2d grid."""
        mpcalc.smooth_n_point(self.pressureslice.relative_humidity)

    def time_zoom_xarray(self, pressureslice):
        """Benchmarking the zoom xarray function."""
        mpcalc.zoom_xarray(self.pressureslice.temperature, zoom=3.0)
MetPy/source/benchmarks/benchmarks/soundings_benchmarks.py ADDED
@@ -0,0 +1,225 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2025 MetPy Developers.
2
+ # Distributed under the terms of the BSD 3-Clause License.
3
+ # SPDX-License-Identifier: BSD-3-Clause
4
+ """Benchmark the functions in the moist thermo section of metpy's calc module.
5
+
6
+ Uses Airspeed Velocity for benchmarking and uses artificial dataset to ensure consistent and
7
+ reliable data for results.
8
+
9
+ """
10
+
11
+ import os
12
+
13
+ import xarray as xr
14
+
15
+ import metpy.calc as mpcalc
16
+ from metpy.units import units
17
+
18
+
19
class TimeSuite:
    """Benchmark sounding/stability functions in time using Airspeed Velocity.

    Uses ASV's benchmarking format to load in data and run benchmarks to measure time
    performance. (The class was templated from the moist-thermo suite.)
    """

    # NOTE: I'm using CalVer https://calver.org/ YYYY.MM.DD
    version = '2025.07.21'

    def setup_cache(self):
        """Collect the sample dataset from the filepath and opens it as an xarray.

        Returns
        -------
        ds
            Dataset with artificial meteorology data for testing
        """
        base_path = os.path.dirname(__file__)  # path to current file
        # Dataset lives one level above the benchmarks package.
        file_path = os.path.join(base_path, '..', 'data_array_compressed.nc')
        file_path = os.path.abspath(file_path)
        ds = xr.open_dataset(file_path)
        return ds

    def setup(self, ds):
        """Set up the appropriate slices from the sample dataset for testing.

        Also precomputes the inputs that some benchmarks need (parcel profile,
        surface-based CAPE, LCL height, storm-relative helicity and bulk shear)
        so those costs are excluded from the timed sections.

        Parameters
        ----------
        ds : dataset
            The dataset made in setup_cache which contains the testing data
        """
        self.timeslice = ds.isel(time=0)
        # NOTE(review): pressureslice is not used by any benchmark in this suite.
        self.pressureslice = ds.isel(time=0, pressure=0)
        self.profileslice = ds.isel(lat=25, lon=25, time=0)
        # Parcel profile lifted from the lowest level of the single profile.
        self.parcelprofile = mpcalc.parcel_profile(self.profileslice.pressure,
                                                   self.profileslice.temperature[0],
                                                   self.profileslice.dewpoint[0])
        self.sbcape, _ = mpcalc.surface_based_cape_cin(self.profileslice.pressure,
                                                       self.profileslice.temperature,
                                                       self.profileslice.dewpoint)
        self.sblcl, _ = mpcalc.lcl(self.profileslice.pressure,
                                   self.profileslice.temperature,
                                   self.profileslice.dewpoint)
        self.sblclheight = mpcalc.pressure_to_height_std(self.sblcl)
        # Total (positive) 0-1 km storm-relative helicity.
        _, _, self.relhel = mpcalc.storm_relative_helicity(self.profileslice.height,
                                                           self.profileslice.uwind,
                                                           self.profileslice.vwind,
                                                           1 * units('km'))
        self.shearu, self.shearv = mpcalc.bulk_shear(self.profileslice.pressure,
                                                     self.profileslice.uwind,
                                                     self.profileslice.vwind)
        self.shear = mpcalc.wind_speed(self.shearu, self.shearv)

    def time_bulk_shear(self, profileslice):
        """Benchmarking calculating the bulk shear of a profile."""
        mpcalc.bulk_shear(self.profileslice.pressure, self.profileslice.uwind,
                          self.profileslice.vwind)

    def time_ccl(self, profileslice):
        """Benchmarking calculating the convective condensation level of a profile."""
        mpcalc.ccl(self.profileslice.pressure, self.profileslice.temperature,
                   self.profileslice.dewpoint)

    def time_parcel_profile(self, profileslice):
        """Benchmarking the atmospheric parcel profile for one profile."""
        mpcalc.parcel_profile(self.profileslice.pressure, self.profileslice.temperature[0],
                              self.profileslice.dewpoint[0])

    def time_most_unstable_parcel(self, profileslice):
        """Benchmarking the calculation to find the most unstable parcel for one profile."""
        mpcalc.most_unstable_parcel(self.profileslice.pressure, self.profileslice.temperature,
                                    self.profileslice.dewpoint)

    def time_cape_cin(self, profileslice):
        """Benchmarking cape_cin calculation for one profile."""
        # Uses the parcel profile precomputed in setup().
        mpcalc.cape_cin(self.profileslice.pressure, self.profileslice.temperature,
                        self.profileslice.dewpoint, self.parcelprofile)

    def time_lcl(self, timeslice):
        """Benchmarks lcl on a 3d cube - many profiles."""
        mpcalc.lcl(self.timeslice.pressure, self.timeslice.temperature,
                   self.timeslice.dewpoint)

    def time_el(self, profileslice):
        """Benchmarks el calculation on one profile."""
        mpcalc.el(self.profileslice.pressure, self.profileslice.temperature,
                  self.profileslice.dewpoint)

    def time_storm_relative_helicity(self, profileslice):
        """Benchmarks storm relative helicity over one profile."""
        mpcalc.storm_relative_helicity(self.profileslice.height, self.profileslice.uwind,
                                       self.profileslice.vwind, 1 * units('km'))

    def time_vertical_totals(self, timeslice):
        """Benchmarking vertical totals for many profiles."""
        mpcalc.vertical_totals(self.timeslice.pressure, self.timeslice.temperature)

    def time_supercell_composite(self, profileslice):
        """Benchmarks supercell composite for fixed scalar inputs."""
        # Scalar inputs are hard-coded, so this times only the composite itself.
        mpcalc.supercell_composite(2500 * units('J/kg'), 125 * units('m^2/s^2'),
                                   50 * units.knot)

    def time_critical_angle(self, profileslice):
        """Benchmarking critical angle on one profile."""
        # Storm motion of (0, 0) m/s is assumed.
        mpcalc.critical_angle(self.profileslice.pressure, self.profileslice.uwind,
                              self.profileslice.vwind, self.profileslice.height,
                              0 * units('m/s'), 0 * units('m/s'))

    def time_bunkers_storm_motion(self, profileslice):
        """Benchmarking bunkers storm motion on one profile."""
        mpcalc.bunkers_storm_motion(self.profileslice.pressure, self.profileslice.uwind,
                                    self.profileslice.vwind, self.profileslice.height)

    def time_corfidi_storm_motion(self, profileslice):
        """Benchmarking corfidi storm motion on one profile."""
        mpcalc.corfidi_storm_motion(self.profileslice.pressure, self.profileslice.uwind,
                                    self.profileslice.vwind)

    def time_sweat_index(self, timeslice):
        """Benchmarking SWEAT index on many profiles."""
        mpcalc.sweat_index(self.timeslice.pressure, self.timeslice.temperature,
                           self.timeslice.dewpoint, self.timeslice.windspeed,
                           self.timeslice.winddir)

    def time_most_unstable_cape_cin(self, profileslice):
        """Benchmarking most unstable cape cin calculation on one profile."""
        mpcalc.most_unstable_cape_cin(self.profileslice.pressure,
                                      self.profileslice.temperature,
                                      self.profileslice.dewpoint)

    def time_surface_based_cape_cin(self, profileslice):
        """Benchmarking surface based cape cin calculation on one profile."""
        mpcalc.surface_based_cape_cin(self.profileslice.pressure,
                                      self.profileslice.temperature,
                                      self.profileslice.dewpoint)

    def time_lifted_index(self, profileslice):
        """Benchmarking lifted index calculation on one profile."""
        # Uses the parcel profile precomputed in setup().
        mpcalc.lifted_index(self.profileslice.pressure, self.profileslice.temperature,
                            self.parcelprofile)

    def time_k_index(self, timeslice):
        """Benchmarking k index calculation on many profiles."""
        mpcalc.k_index(self.timeslice.pressure, self.timeslice.temperature,
                       self.timeslice.dewpoint)

    def time_mixed_layer_cape_cin(self, profileslice):
        """Benchmarking mixed layer cape cin calculation for one profile."""
        mpcalc.mixed_layer_cape_cin(self.profileslice.pressure, self.profileslice.temperature,
                                    self.profileslice.dewpoint)

    def time_cross_totals(self, timeslice):
        """Benchmarking cross totals calculation on many profiles."""
        mpcalc.cross_totals(self.timeslice.pressure, self.timeslice.temperature,
                            self.timeslice.dewpoint)

    def time_downdraft_cape(self, profileslice):
        """Benchmarking downdraft cape calculation on one profile."""
        mpcalc.downdraft_cape(self.profileslice.pressure, self.profileslice.temperature,
                              self.profileslice.dewpoint)

    def time_parcel_profile_with_lcl_as_dataset(self, profileslice):
        """Benchmarking parcel profile with lcl as dataset on one profile."""
        mpcalc.parcel_profile_with_lcl_as_dataset(self.profileslice.pressure,
                                                  self.profileslice.temperature,
                                                  self.profileslice.dewpoint)

    def time_showalter_index(self, profileslice):
        """Benchmarking calculating the showalter index on one profile."""
        mpcalc.showalter_index(self.profileslice.pressure, self.profileslice.temperature,
                               self.profileslice.dewpoint)

    def time_galvez_davison_index(self, timeslice):
        """Benchmarking calculating the galvez davison index on many profiles."""
        # Surface pressure is taken as the lowest pressure level of the cube.
        mpcalc.galvez_davison_index(self.timeslice.pressure, self.timeslice.temperature,
                                    self.timeslice.mixing_ratio, self.timeslice.pressure[0])

    def time_significant_tornado(self, profileslice):
        """Benchmarking significant tornado param for one profile."""
        # All four inputs were precomputed in setup(); only the parameter
        # combination itself is timed here.
        mpcalc.significant_tornado(self.sbcape, self.sblclheight, self.relhel, self.shear)

    def time_total_totals_index(self, timeslice):
        """Benchmarking total totals index for many profiles."""
        mpcalc.total_totals_index(self.timeslice.pressure, self.timeslice.temperature,
                                  self.timeslice.dewpoint)

    def time_lfc(self, profileslice):
        """Benchmarking level of free convection calculation for one profile."""
        mpcalc.lfc(self.profileslice.pressure, self.profileslice.temperature,
                   self.profileslice.dewpoint)

    def time_mixed_parcel(self, profileslice):
        """Benchmarking mixed parcel for one profile."""
        mpcalc.mixed_parcel(self.profileslice.pressure, self.profileslice.temperature,
                            self.profileslice.dewpoint)

    def time_mixed_layer(self, profileslice):
        """Benchmarking mixed layer of temperature for one profile."""
        mpcalc.mixed_layer(self.profileslice.pressure, self.profileslice.temperature)

    def time_parcel_profile_with_lcl(self, profileslice):
        """Benchmarking parcel profile with lcl calculation."""
        mpcalc.parcel_profile_with_lcl(self.profileslice.pressure,
                                       self.profileslice.temperature,
                                       self.profileslice.dewpoint)