code stringlengths 582 165k | apis sequence | extract_api stringlengths 325 147k |
|---|---|---|
# !usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under a 3-clause BSD license.
#
# @Author: <NAME>
# @Date: 2018-10-11 17:51:43
# @Last modified by: <NAME>
# @Last Modified time: 2018-11-29 17:23:15
from __future__ import print_function, division, absolute_import
import numpy as np
import astropy
import astropy.units as u
import marvin.tools
from marvin.tools.quantities.spectrum import Spectrum
from marvin.utils.general.general import get_drpall_table
from marvin.utils.plot.scatter import plot as scatplot
from marvin import log
from .base import VACMixIn, VACTarget
def choose_best_spectrum(par1, par2, conf_thresh=0.1):
    '''Select the optimal HI spectrum between two observing programs.

    Decision rules (following Stark+21):
      (1) both detected, neither confused         -> highest SNR
      (2) both detected, both confused            -> lowest confusion probability
      (3) both detected, exactly one confused     -> the unconfused one
      (4) one unconfused detection, one non-detection -> the detection
      (5) one confused detection, one non-detection   -> the non-detection
      (6) neither detected                        -> lowest rms

    Parameters
    ----------
    par1, par2 : dict
        Dictionaries with keys ``program`` ('gbt' or 'alfalfa'), ``snr``
        (integrated SNR; <= 0 means non-detection), ``rms`` (noise level),
        and ``conf_prob`` (confusion probability).
    conf_thresh : float
        Maximum confusion probability below which the object is treated as
        essentially unconfused. Defaults to 0.1 (Stark+21).

    Returns
    -------
    str
        The ``program`` value of the preferred spectrum.
    '''
    candidates = (par1, par2)
    programs = [cand['program'] for cand in candidates]

    highest_snr = np.argmax([cand['snr'] for cand in candidates])
    lowest_rms = np.argmin([cand['rms'] for cand in candidates])
    lowest_conf = np.argmin([cand['conf_prob'] for cand in candidates])

    conf1 = par1['conf_prob']
    conf2 = par2['conf_prob']

    if par1['snr'] > 0 and par2['snr'] > 0:
        # both detected: prefer unconfused spectra, then SNR / confusion
        if conf1 <= conf_thresh and conf2 <= conf_thresh:
            pick = highest_snr
        elif conf1 <= conf_thresh and conf2 > conf_thresh:
            pick = 0
        elif conf1 > conf_thresh and conf2 <= conf_thresh:
            pick = 1
        elif conf1 > conf_thresh and conf2 > conf_thresh:
            pick = lowest_conf
    elif par1['snr'] <= 0 and par2['snr'] <= 0:
        # neither detected: deepest (lowest rms) spectrum wins
        pick = lowest_rms
    elif par1['snr'] > 0 and par2['snr'] <= 0:
        # only par1 detected: keep it unless it is confused
        pick = 0 if conf1 < conf_thresh else 1
    elif par1['snr'] <= 0 and par2['snr'] > 0:
        # only par2 detected: keep it unless it is confused
        pick = 1 if conf2 < conf_thresh else 0
    return programs[pick]
class HIVAC(VACMixIn):
    """Provides access to the MaNGA-HI VAC.
    VAC name: HI
    URL: https://www.sdss.org/dr17/data_access/value-added-catalogs/?vac_id=hi-manga-data-release-1
    Description: Returns HI summary data and spectra
    Authors: <NAME> and <NAME>
    """
    # Required parameters
    name = 'HI'
    description = 'Returns HI summary data and spectra'
    # VAC version shipped with each MaNGA release
    version = {'MPL-7': 'v1_0_1', 'DR15': 'v1_0_1', 'DR16': 'v1_0_2', 'DR17': 'v2_0_1', 'MPL-11': 'v2_0_1'}
    display_name = 'HI'
    url = 'https://www.sdss.org/dr17/data_access/value-added-catalogs/?vac_id=hi-manga-data-release-1'
    # optional Marvin Tools to attach your vac to
    include = (marvin.tools.cube.Cube, marvin.tools.maps.Maps, marvin.tools.modelcube.ModelCube)
    # optional methods to attach to your main VAC tool in ~marvin.tools.vacs.VACs
    add_methods = ['plot_mass_fraction']
    # Required method
    def set_summary_file(self, release):
        ''' Sets the path to the HI summary file '''
        # define the variables to build a unique path to your VAC file
        self.path_params = {'ver': self.version[release], 'type': 'all', 'program': 'GBT16A_095'}
        # get_path returns False if the files do not exist locally
        self.summary_file = self.get_path("mangahisum", path_params=self.path_params)
    def set_program(self,plateifu):
        ''' Choose the observing program (gbt or alfalfa) for this target.

        Reads the summary file rows matching ``plateifu`` and stores the
        chosen program name in ``self.path_params['program']``.
        '''
        # download the vac from the SAS if it does not already exist locally
        if not self.file_exists(self.summary_file):
            self.summary_file = self.download_vac('mangahisum', path_params=self.path_params)
        # Find all entries in summary file with this plate-ifu.
        # Need the full summary file data.
        # Find best entry between GBT/ALFALFA based on dept and confusion.
        # Then update self.path_params['program'] with alfalfa or gbt.
        summary = HITarget(plateifu, vacfile=self.summary_file)._data
        galinfo = summary[summary['plateifu'] == plateifu]
        if len(galinfo) == 1 and galinfo['session']=='ALFALFA':
            program = 'alfalfa'
        elif len(galinfo) in [0, 1]:
            # if no entry found or session is GBT, default program to gbt
            program = 'gbt'
        else:
            # Two entries (one per program): build comparable parameter dicts
            # and let choose_best_spectrum arbitrate between them.
            # snr stays 0 (non-detection) unless a positive flux exists below.
            par1 = {'program': 'gbt','snr': 0.,'rms': galinfo[0]['rms'], 'conf_prob': galinfo[0]['conf_prob']}
            par2 = {'program': 'gbt','snr': 0.,'rms': galinfo[1]['rms'], 'conf_prob': galinfo[1]['conf_prob']}
            if galinfo[0]['session']=='ALFALFA':
                par1['program'] = 'alfalfa'
            if galinfo[1]['session']=='ALFALFA':
                par2['program'] = 'alfalfa'
            if galinfo[0]['fhi'] > 0:
                # integrated SNR = flux over its uncertainty
                par1['snr'] = galinfo[0]['fhi']/galinfo[0]['efhi']
            if galinfo[1]['fhi'] > 0:
                par2['snr'] = galinfo[1]['fhi']/galinfo[1]['efhi']
            program = choose_best_spectrum(par1,par2)
        log.info('Using HI data from {0}'.format(program))
        # get path to ancillary VAC file for target HI spectra
        self.update_path_params({'program':program})
    # Required method
    def get_target(self, parent_object):
        ''' Accesses VAC data for a specific target from a Marvin Tool object '''
        # get any parameters you need from the parent object
        plateifu = parent_object.plateifu
        self.update_path_params({'plateifu': plateifu})
        # DR17/MPL-11 carry both GBT and ALFALFA spectra; pick the best one
        if parent_object.release in ['DR17', 'MPL-11']:
            self.set_program(plateifu)
        specfile = self.get_path('mangahispectra', path_params=self.path_params)
        # create container for more complex return data
        hidata = HITarget(plateifu, vacfile=self.summary_file, specfile=specfile)
        # get the spectral data for that row if it exists
        if hidata._indata and not self.file_exists(specfile):
            hidata._specfile = self.download_vac('mangahispectra', path_params=self.path_params)
        return hidata
class HITarget(VACTarget):
    ''' A customized target class to also display HI spectra

    Handles data from both the HI summary file and the individual spectral
    files. Row data from the summary file for the given target is available
    through the `data` property; the spectrum itself can be shown with the
    `plot_spectrum` method.

    Parameters:
        targetid (str):
            The plateifu or mangaid designation
        vacfile (str):
            The path of the VAC summary file
        specfile (str):
            The path to the HI spectra

    Attributes:
        data:
            The target row data from the main VAC file
        targetid (str):
            The target identifier
    '''
    def __init__(self, targetid, vacfile, specfile=None):
        super(HITarget, self).__init__(targetid, vacfile)
        self._specfile = specfile
        self._specdata = None
    def plot_spectrum(self):
        ''' Plot the HI spectrum

        Returns the matplotlib axes showing HI flux density versus velocity,
        or ``None`` when no spectral file is attached to this target.
        '''
        if not self._specfile:
            return None
        # lazily load the spectral data the first time it is needed
        if not self._specdata:
            self._specdata = self._get_data(self._specfile)
        velocity = self._specdata['VHI'][0]
        hi_flux = self._specdata['FHI'][0]
        hi_spectrum = Spectrum(hi_flux, unit=u.Jy, wavelength=velocity,
                              wavelength_unit=u.km / u.s)
        return hi_spectrum.plot(ylabel='HI\ Flux\ Density', xlabel='Velocity',
                                title=self.targetid, ytrim='minmax')
#
# Functions to become available on your VAC in marvin.tools.vacs.VACs
def plot_mass_fraction(vacdata_object):
    ''' Plot the HI mass fraction

    Computes and plots the HI mass fraction using the NSA elliptical
    Petrosian stellar mass from the MaNGA DRPall file. Only plots data
    for the subset of targets in both the HI VAC and the DRPall file.

    Parameters:
        vacdata_object (object):
            The `~.VACDataClass` instance of the HI VAC

    Example:
        >>> from marvin.tools.vacs import VACs
        >>> v = VACs()
        >>> hi = v.HI
        >>> hi.plot_mass_fraction()
    '''
    drpall = get_drpall_table()
    drpall.add_index('plateifu')
    vac_rows = vacdata_object.data[1].data
    # cross-match the VAC targets against DRPall by plate-ifu
    matched = drpall.loc[vac_rows['plateifu']]
    log_stellar_mass = np.log10(matched['nsa_elpetro_mass'])
    # mass fraction in log space: log(M_HI) - log(M_*)
    mass_fraction = vac_rows['logMHI'] - log_stellar_mass
    fig, axes = scatplot(
        log_stellar_mass,
        mass_fraction,
        with_hist=False,
        ylim=[-5, 5],
        xlabel=r'log $M_*$',
        ylabel=r'log $M_{HI}/M_*$',
    )
    return axes[0]
| [
"marvin.utils.general.general.get_drpall_table",
"marvin.utils.plot.scatter.plot",
"marvin.tools.quantities.spectrum.Spectrum"
] | [((1530, 1567), 'numpy.argmax', 'np.argmax', (["[par1['snr'], par2['snr']]"], {}), "([par1['snr'], par2['snr']])\n", (1539, 1567), True, 'import numpy as np\n'), ((1585, 1622), 'numpy.argmin', 'np.argmin', (["[par1['rms'], par2['rms']]"], {}), "([par1['rms'], par2['rms']])\n", (1594, 1622), True, 'import numpy as np\n'), ((1641, 1690), 'numpy.argmin', 'np.argmin', (["[par1['conf_prob'], par2['conf_prob']]"], {}), "([par1['conf_prob'], par2['conf_prob']])\n", (1650, 1690), True, 'import numpy as np\n'), ((8779, 8797), 'marvin.utils.general.general.get_drpall_table', 'get_drpall_table', ([], {}), '()\n', (8795, 8797), False, 'from marvin.utils.general.general import get_drpall_table\n'), ((8929, 8965), 'numpy.log10', 'np.log10', (["subset['nsa_elpetro_mass']"], {}), "(subset['nsa_elpetro_mass'])\n", (8937, 8965), True, 'import numpy as np\n'), ((9021, 9130), 'marvin.utils.plot.scatter.plot', 'scatplot', (['log_stmass', 'diff'], {'with_hist': '(False)', 'ylim': '[-5, 5]', 'xlabel': '"""log $M_*$"""', 'ylabel': '"""log $M_{HI}/M_*$"""'}), "(log_stmass, diff, with_hist=False, ylim=[-5, 5], xlabel=\n 'log $M_*$', ylabel='log $M_{HI}/M_*$')\n", (9029, 9130), True, 'from marvin.utils.plot.scatter import plot as scatplot\n'), ((7863, 7932), 'marvin.tools.quantities.spectrum.Spectrum', 'Spectrum', (['flux'], {'unit': 'u.Jy', 'wavelength': 'vel', 'wavelength_unit': '(u.km / u.s)'}), '(flux, unit=u.Jy, wavelength=vel, wavelength_unit=u.km / u.s)\n', (7871, 7932), False, 'from marvin.tools.quantities.spectrum import Spectrum\n')] |
#!/usr/bin/env python
# encoding: utf-8
#
# bpt.py
#
# Created by <NAME> on 19 Jan 2017.
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from packaging.version import parse
import warnings
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.axes_grid1 import ImageGrid
from marvin.core.exceptions import MarvinDeprecationWarning, MarvinError
from marvin.utils.plot import bind_to_figure
__ALL__ = ('get_snr', 'kewley_sf_nii', 'kewley_sf_sii', 'kewley_sf_oi',
'kewley_comp_nii', 'kewley_agn_sii', 'kewley_agn_oi',
'bpt_kewley06')
def get_snr(snr_min, emission_line, default=3):
    """Convenience function to get the minimum SNR for a certain emission line.

    If ``snr_min`` is a dictionary and ``emission_line`` is one of the keys,
    returns that value. If the emission line is not included in the dictionary,
    returns ``default``. If ``snr_min`` is a float, returns that value
    regardless of the ``emission_line``.

    Parameters:
        snr_min (float or dict):
            A single SNR cutoff applied to every line, or a mapping of
            emission-line names to per-line cutoffs.
        emission_line (str):
            The name of the emission line to look up.
        default (float):
            The SNR used for lines missing from a ``snr_min`` dictionary.

    Returns:
        float: the minimum SNR to use for ``emission_line``.
    """
    # A scalar cutoff applies to every emission line
    if not isinstance(snr_min, dict):
        return snr_min
    # dict.get does the membership test and fallback in one lookup
    return snr_min.get(emission_line, default)
def get_masked(maps, emline, snr=1):
    """Return the masked Gaussian flux map for ``emline``.

    Extends the native mask of the flux map so that non-positive fluxes,
    spaxels below the ``snr`` cutoff, and spaxels with zero inverse
    variance are all masked out.
    """
    gflux = maps['emline_gflux_' + emline]
    flux_masked = gflux.masked
    # Combine the three rejection criteria in a single boolean mask:
    # unphysical fluxes, low-significance spaxels, and bad inverse variances.
    rejected = (flux_masked.data <= 0) | (gflux.snr < snr) | (gflux.ivar == 0)
    flux_masked.mask |= rejected
    return flux_masked
def _get_kewley06_axes(use_oi=True):
    """Creates custom axes for displaying Kewley06 plots.

    Lays out a row of BPT classification panels (NII and SII, plus OI when
    ``use_oi`` is True) with the Kewley+06 boundary lines and region labels
    already drawn, and a second single-panel grid below for displaying the
    galaxy map.

    Parameters:
        use_oi (bool):
            If True, include the third ([OI]/Halpha) diagnostic panel.

    Returns:
        tuple: ``(fig, grid_bpt, gal_ax)`` -- the figure, the ImageGrid of
        classification axes, and the axes for the galaxy display.
    """
    fig = plt.figure(None, (8.5, 10))
    fig.clf()
    plt.subplots_adjust(top=0.99, bottom=0.08, hspace=0.01)
    # The axes for the three classification plots
    # 'add_all' was removed in matplotlib 3.5; only pass it to older versions
    imgrid_kwargs = {'add_all': True} if parse(matplotlib.__version__) < parse('3.5.0') else {}
    grid_bpt = ImageGrid(fig, 211,
                         nrows_ncols=(1, 3) if use_oi else (1, 2),
                         direction='row',
                         axes_pad=0.1,
                         label_mode='L',
                         share_all=False, **imgrid_kwargs)
    # The axes for the galaxy display
    gal_bpt = ImageGrid(fig, 212, nrows_ncols=(1, 1))
    # Plots the classification boundary lines
    # (each x range stops before the corresponding curve's asymptote)
    xx_sf_nii = np.linspace(-1.281, 0.045, int(1e4))
    xx_sf_sii = np.linspace(-2, 0.315, int(1e4))
    xx_sf_oi = np.linspace(-2.5, -0.7, int(1e4))
    xx_comp_nii = np.linspace(-2, 0.4, int(1e4))
    xx_agn_sii = np.array([-0.308, 1.0])
    xx_agn_oi = np.array([-1.12, 0.5])
    grid_bpt[0].plot(xx_sf_nii, kewley_sf_nii(xx_sf_nii), 'k--', zorder=90)
    grid_bpt[1].plot(xx_sf_sii, kewley_sf_sii(xx_sf_sii), 'r-', zorder=90)
    if use_oi:
        grid_bpt[2].plot(xx_sf_oi, kewley_sf_oi(xx_sf_oi), 'r-', zorder=90)
    grid_bpt[0].plot(xx_comp_nii, kewley_comp_nii(xx_comp_nii), 'r-', zorder=90)
    grid_bpt[1].plot(xx_agn_sii, kewley_agn_sii(xx_agn_sii), 'b-', zorder=80)
    if use_oi:
        grid_bpt[2].plot(xx_agn_oi, kewley_agn_oi(xx_agn_oi), 'b-', zorder=80)
    # Adds captions labelling each classification region
    grid_bpt[0].text(-1, -0.5, 'SF', ha='center', fontsize=12, zorder=100, color='c')
    grid_bpt[0].text(0.5, 0.5, 'AGN', ha='left', fontsize=12, zorder=100)
    grid_bpt[0].text(-0.08, -1.2, 'Comp', ha='left', fontsize=12, zorder=100, color='g')
    grid_bpt[1].text(-1.2, -0.5, 'SF', ha='center', fontsize=12, zorder=100)
    grid_bpt[1].text(-1, 1.2, 'Seyfert', ha='left', fontsize=12, zorder=100, color='r')
    grid_bpt[1].text(0.3, -1, 'LINER', ha='left', fontsize=12, zorder=100, color='m')
    if use_oi:
        grid_bpt[2].text(-2, -0.5, 'SF', ha='center', fontsize=12, zorder=100)
        grid_bpt[2].text(-1.5, 1, 'Seyfert', ha='left', fontsize=12, zorder=100)
        grid_bpt[2].text(-0.1, -1, 'LINER', ha='right', fontsize=12, zorder=100)
    # Sets the ticks, ticklabels, and other details
    xtick_limits = ((-2, 1), (-1.5, 1), (-2.5, 0.5))
    axes = [0, 1, 2] if use_oi else [0, 1]
    for ii in axes:
        grid_bpt[ii].get_xaxis().set_tick_params(direction='in')
        grid_bpt[ii].get_yaxis().set_tick_params(direction='in')
        grid_bpt[ii].set_xticks(np.arange(xtick_limits[ii][0], xtick_limits[ii][1] + 0.5, 0.5))
        grid_bpt[ii].set_xticks(np.arange(xtick_limits[ii][0],
                                          xtick_limits[ii][1] + 0.1, 0.1), minor=True)
        grid_bpt[ii].set_yticks(np.arange(-1.5, 2.0, 0.5))
        grid_bpt[ii].set_yticks(np.arange(-1.5, 1.6, 0.1), minor=True)
        grid_bpt[ii].grid(which='minor', alpha=0.2)
        grid_bpt[ii].grid(which='major', alpha=0.5)
        grid_bpt[ii].set_xlim(xtick_limits[ii][0], xtick_limits[ii][1])
        grid_bpt[ii].set_ylim(-1.5, 1.6)
        if use_oi:
            # with the OI panel the y range is extended (overrides the above)
            grid_bpt[ii].set_ylim(-1.5, 1.8)
        grid_bpt[ii].spines['top'].set_visible(True)
        if ii in [0, 1]:
            if not use_oi and ii == 1:
                continue
            # hide the last x tick label (presumably to avoid it colliding
            # with the neighbouring panel)
            grid_bpt[ii].get_xticklabels()[-1].set_visible(False)
    grid_bpt[0].set_ylabel(r'log([OIII]/H$\beta$)')
    grid_bpt[0].set_xlabel(r'log([NII]/H$\alpha$)')
    grid_bpt[1].set_xlabel(r'log([SII]/H$\alpha$)')
    if use_oi:
        grid_bpt[2].set_xlabel(r'log([OI]/H$\alpha$)')
    gal_bpt[0].grid(False)
    return fig, grid_bpt, gal_bpt[0]
def kewley_sf_nii(log_nii_ha):
    """Kewley+06 star-forming boundary in the [NII]/Ha diagnostic."""
    return 1.3 + 0.61 / (log_nii_ha - 0.05)
def kewley_sf_sii(log_sii_ha):
    """Kewley+06 star-forming boundary in the [SII]/Ha diagnostic."""
    return 1.3 + 0.72 / (log_sii_ha - 0.32)
def kewley_sf_oi(log_oi_ha):
    """Kewley+06 star-forming boundary in the [OI]/Ha diagnostic."""
    return 1.33 + 0.73 / (log_oi_ha + 0.59)
def kewley_comp_nii(log_nii_ha):
    """Kewley+06 composite boundary in the [NII]/Ha diagnostic."""
    return 1.19 + 0.61 / (log_nii_ha - 0.47)
def kewley_agn_sii(log_sii_ha):
    """Kewley+06 Seyfert/LINER dividing line in the [SII]/Ha diagnostic."""
    return 0.76 + 1.89 * log_sii_ha
def kewley_agn_oi(log_oi_ha):
    """Kewley+06 Seyfert/LINER dividing line in the [OI]/Ha diagnostic."""
    return 1.30 + 1.18 * log_oi_ha
def bpt_kewley06(maps, snr_min=3, return_figure=True, use_oi=True, **kwargs):
    """Returns a classification of ionisation regions, as defined in Kewley+06.

    Makes use of the classification system defined by
    `Kewley et al. (2006) <https://ui.adsabs.harvard.edu/#abs/2006MNRAS.372..961K/abstract>`_
    to return classification masks for different ionisation mechanisms. If ``return_figure=True``,
    produces and returns a matplotlib figure with the classification plots (based on
    Kewley+06 Fig. 4) and the 2D spatial distribution of classified spaxels (i.e., a map of the
    galaxy in which each spaxel is colour-coded based on its emission mechanism).
    While it is possible to call this function directly, its normal use will be via the
    :func:`~marvin.tools.maps.Maps.get_bpt` method.

    Parameters:
        maps (a Marvin :class:`~marvin.tools.maps.Maps` object)
            The Marvin Maps object that contains the emission line maps to be used to determine
            the BPT classification.
        snr_min (float or dict):
            The signal-to-noise cutoff value for the emission lines used to generate the BPT
            diagram. If ``snr_min`` is a single value, that signal-to-noise will be used for all
            the lines. Alternatively, a dictionary of signal-to-noise values, with the
            emission line channels as keys, can be used.
            E.g., ``snr_min={'ha': 5, 'nii': 3, 'oi': 1}``. If some values are not provided,
            they will default to ``SNR>=3``. Note that the value ``sii`` will be applied to both
            ``[SII 6718]`` and ``[SII 6732]``.
        return_figure (bool):
            If ``True``, it also returns the matplotlib figure_ of the BPT diagram plot,
            which can be used to modify the style of the plot.
        use_oi (bool):
            If ``True``, uses the OI diagnostic diagram for spaxel classification.

    Returns:
        bpt_return:
            ``bpt_kewley06`` returns a dictionary of dictionaries of classification masks.
            The classification masks (not to be confused with bitmasks) are boolean arrays with the
            same shape as the Maps or Cube (without the spectral dimension) that can be used
            to select spaxels belonging to a certain excitation process (e.g., star forming).
            The returned dictionary has the following keys: ``'sf'`` (star forming), ``'comp'``
            (composite), ``'agn'``, ``'seyfert'``, ``'liner'``, ``'invalid'``
            (spaxels that are masked out at the DAP level), and ``'ambiguous'`` (good spaxels that
            do not fall in any classification or fall in more than one). Each key provides access
            to a new dictionary with keys ``'nii'`` (for the constraints in the diagram NII/Halpha
            vs OIII/Hbeta), ``'sii'`` (SII/Halpha vs OIII/Hbeta), ``'oi'`` (OI/Halpha vs
            OIII/Hbeta; only if ``use_oi=True``), and ``'global'``, which applies all the previous
            constraints at once. The ``'ambiguous'`` mask only contains the ``'global'``
            subclassification, while the ``'comp'`` dictionary only contains ``'nii'``.
            ``'nii'`` is not available for ``'seyfert'`` and ``'liner'``. All the global masks are
            unique (a spaxel can only belong to one of them) with the exception of ``'agn'``, which
            intersects with ``'seyfert'`` and ``'liner'``. Additionally, if ``return_figure=True``,
            ``bpt_kewley06`` will also return the matplotlib figure for the generated plot, and a
            list of axes for each one of the subplots.

    Example:
        >>> maps_8485_1901 = Maps(plateifu='8485-1901')
        >>> bpt_masks, fig, axes = bpt_kewley06(maps_8485_1901)
        Gets the global mask for star forming spaxels
        >>> sf = bpt_masks['sf']['global']
        Gets the seyfert mask based only on the SII/Halpha vs OIII/Hbeta diagnostics
        >>> seyfert_sii = bpt_masks['seyfert']['sii']
    """
    if 'snr' in kwargs:
        warnings.warn('snr is deprecated. Use snr_min instead. '
                      'snr will be removed in a future version of marvin',
                      MarvinDeprecationWarning)
        snr_min = kwargs.pop('snr')
    if len(kwargs.keys()) > 0:
        raise MarvinError('unknown keyword {0}'.format(list(kwargs.keys())[0]))
    # Gets the necessary emission line maps
    oiii = get_masked(maps, 'oiii_5008', snr=get_snr(snr_min, 'oiii'))
    nii = get_masked(maps, 'nii_6585', snr=get_snr(snr_min, 'nii'))
    ha = get_masked(maps, 'ha_6564', snr=get_snr(snr_min, 'ha'))
    hb = get_masked(maps, 'hb_4862', snr=get_snr(snr_min, 'hb'))
    oi = get_masked(maps, 'oi_6302', snr=get_snr(snr_min, 'oi'))
    sii_6718 = get_masked(maps, 'sii_6718', snr=get_snr(snr_min, 'sii'))
    sii_6732 = get_masked(maps, 'sii_6732', snr=get_snr(snr_min, 'sii'))
    sii = sii_6718 + sii_6732
    # Calculate masked logarithms of the diagnostic line ratios
    log_oiii_hb = np.ma.log10(oiii / hb)
    log_nii_ha = np.ma.log10(nii / ha)
    log_sii_ha = np.ma.log10(sii / ha)
    log_oi_ha = np.ma.log10(oi / ha)
    # Calculates masks for each emission mechanism according to the paper boundaries.
    # The log_nii_ha < 0.05, log_sii_ha < 0.32, etc are necessary because the classification lines
    # diverge and we only want the region before the asymptota.
    sf_mask_nii = ((log_oiii_hb < kewley_sf_nii(log_nii_ha)) & (log_nii_ha < 0.05)).filled(False)
    sf_mask_sii = ((log_oiii_hb < kewley_sf_sii(log_sii_ha)) & (log_sii_ha < 0.32)).filled(False)
    sf_mask_oi = ((log_oiii_hb < kewley_sf_oi(log_oi_ha)) & (log_oi_ha < -0.59)).filled(False)
    sf_mask = sf_mask_nii & sf_mask_sii & sf_mask_oi if use_oi else sf_mask_nii & sf_mask_sii
    comp_mask = ((log_oiii_hb > kewley_sf_nii(log_nii_ha)) & (log_nii_ha < 0.05)).filled(False) & \
        ((log_oiii_hb < kewley_comp_nii(log_nii_ha)) & (log_nii_ha < 0.465)).filled(False)
    comp_mask &= (sf_mask_sii & sf_mask_oi) if use_oi else sf_mask_sii
    agn_mask_nii = ((log_oiii_hb > kewley_comp_nii(log_nii_ha)) |
                    (log_nii_ha > 0.465)).filled(False)
    agn_mask_sii = ((log_oiii_hb > kewley_sf_sii(log_sii_ha)) |
                    (log_sii_ha > 0.32)).filled(False)
    agn_mask_oi = ((log_oiii_hb > kewley_sf_oi(log_oi_ha)) |
                   (log_oi_ha > -0.59)).filled(False)
    agn_mask = agn_mask_nii & agn_mask_sii & agn_mask_oi if use_oi else agn_mask_nii & agn_mask_sii
    # AGN spaxels split into Seyfert/LINER by the Kewley+06 dividing lines
    seyfert_mask_sii = agn_mask & (kewley_agn_sii(log_sii_ha) < log_oiii_hb).filled(False)
    seyfert_mask_oi = agn_mask & (kewley_agn_oi(log_oi_ha) < log_oiii_hb).filled(False)
    seyfert_mask = seyfert_mask_sii & seyfert_mask_oi if use_oi else seyfert_mask_sii
    liner_mask_sii = agn_mask & (kewley_agn_sii(log_sii_ha) > log_oiii_hb).filled(False)
    liner_mask_oi = agn_mask & (kewley_agn_oi(log_oi_ha) > log_oiii_hb).filled(False)
    liner_mask = liner_mask_sii & liner_mask_oi if use_oi else liner_mask_sii
    # The invalid mask is the combination of spaxels that are invalid in all of the emission maps
    invalid_mask_nii = ha.mask | oiii.mask | nii.mask | hb.mask
    invalid_mask_sii = ha.mask | oiii.mask | sii.mask | hb.mask
    invalid_mask_oi = ha.mask | oiii.mask | oi.mask | hb.mask
    invalid_mask = ha.mask | oiii.mask | nii.mask | hb.mask | sii.mask
    if use_oi:
        invalid_mask |= oi.mask
    # The ambiguous mask are spaxels that are not invalid but don't fall into any of the
    # emission mechanism classifications.
    ambiguous_mask = ~(sf_mask | comp_mask | seyfert_mask | liner_mask) & ~invalid_mask
    sf_classification = {'global': sf_mask,
                         'nii': sf_mask_nii,
                         'sii': sf_mask_sii}
    comp_classification = {'global': comp_mask,
                           'nii': comp_mask}
    agn_classification = {'global': agn_mask,
                          'nii': agn_mask_nii,
                          'sii': agn_mask_sii}
    seyfert_classification = {'global': seyfert_mask,
                              'sii': seyfert_mask_sii}
    liner_classification = {'global': liner_mask,
                            'sii': liner_mask_sii}
    invalid_classification = {'global': invalid_mask,
                              'nii': invalid_mask_nii,
                              'sii': invalid_mask_sii}
    ambiguous_classification = {'global': ambiguous_mask}
    if use_oi:
        sf_classification['oi'] = sf_mask_oi
        agn_classification['oi'] = agn_mask_oi
        seyfert_classification['oi'] = seyfert_mask_oi
        liner_classification['oi'] = liner_mask_oi
        invalid_classification['oi'] = invalid_mask_oi
    bpt_return_classification = {'sf': sf_classification,
                                 'comp': comp_classification,
                                 'agn': agn_classification,
                                 'seyfert': seyfert_classification,
                                 'liner': liner_classification,
                                 'invalid': invalid_classification,
                                 'ambiguous': ambiguous_classification}
    if not return_figure:
        return bpt_return_classification
    # Does all the plotting.
    # matplotlib >= 3.6 renamed the seaborn style sheets to 'seaborn-v0_8-*'
    # (the old names were later removed entirely), so use whichever name this
    # matplotlib provides instead of hard-coding 'seaborn-darkgrid'.
    if 'seaborn-darkgrid' in plt.style.available:
        _seaborn_style = 'seaborn-darkgrid'
    else:
        _seaborn_style = 'seaborn-v0_8-darkgrid'
    with plt.style.context(_seaborn_style):
        fig, grid_bpt, gal_bpt = _get_kewley06_axes(use_oi=use_oi)
        sf_kwargs = {'marker': 's', 's': 12, 'color': 'c', 'zorder': 50, 'alpha': 0.7, 'lw': 0.0,
                     'label': 'Star-forming'}
        sf_handler = grid_bpt[0].scatter(log_nii_ha[sf_mask], log_oiii_hb[sf_mask], **sf_kwargs)
        grid_bpt[1].scatter(log_sii_ha[sf_mask], log_oiii_hb[sf_mask], **sf_kwargs)
        comp_kwargs = {'marker': 's', 's': 12, 'color': 'g', 'zorder': 45, 'alpha': 0.7, 'lw': 0.0,
                       'label': 'Composite'}
        comp_handler = grid_bpt[0].scatter(log_nii_ha[comp_mask], log_oiii_hb[comp_mask],
                                           **comp_kwargs)
        grid_bpt[1].scatter(log_sii_ha[comp_mask], log_oiii_hb[comp_mask], **comp_kwargs)
        seyfert_kwargs = {'marker': 's', 's': 12, 'color': 'r', 'zorder': 40, 'alpha': 0.7, 'lw': 0.0,
                          'label': 'Seyfert'}
        seyfert_handler = grid_bpt[0].scatter(log_nii_ha[seyfert_mask], log_oiii_hb[seyfert_mask],
                                              **seyfert_kwargs)
        grid_bpt[1].scatter(log_sii_ha[seyfert_mask], log_oiii_hb[seyfert_mask], **seyfert_kwargs)
        liner_kwargs = {'marker': 's', 's': 12, 'color': 'm', 'zorder': 35, 'alpha': 0.7, 'lw': 0.0,
                        'label': 'LINER'}
        liner_handler = grid_bpt[0].scatter(log_nii_ha[liner_mask], log_oiii_hb[liner_mask],
                                            **liner_kwargs)
        grid_bpt[1].scatter(log_sii_ha[liner_mask], log_oiii_hb[liner_mask], **liner_kwargs)
        amb_kwargs = {'marker': 's', 's': 12, 'color': '0.6', 'zorder': 30, 'alpha': 0.7, 'lw': 0.0,
                      'label': 'Ambiguous '}
        amb_handler = grid_bpt[0].scatter(log_nii_ha[ambiguous_mask], log_oiii_hb[ambiguous_mask],
                                          **amb_kwargs)
        grid_bpt[1].scatter(log_sii_ha[ambiguous_mask], log_oiii_hb[ambiguous_mask], **amb_kwargs)
        if use_oi:
            grid_bpt[2].scatter(log_oi_ha[sf_mask], log_oiii_hb[sf_mask], **sf_kwargs)
            grid_bpt[2].scatter(log_oi_ha[comp_mask], log_oiii_hb[comp_mask], **comp_kwargs)
            grid_bpt[2].scatter(log_oi_ha[seyfert_mask], log_oiii_hb[seyfert_mask], **seyfert_kwargs)
            grid_bpt[2].scatter(log_oi_ha[liner_mask], log_oiii_hb[liner_mask], **liner_kwargs)
            grid_bpt[2].scatter(log_oi_ha[ambiguous_mask], log_oiii_hb[ambiguous_mask], **amb_kwargs)
        # Creates the legend
        grid_bpt[0].legend([sf_handler, comp_handler, seyfert_handler, liner_handler, amb_handler],
                           ['Star-forming', 'Composite', 'Seyfert', 'LINER', 'Ambiguous'], ncol=2,
                           loc='upper left', frameon=True, labelspacing=0.1, columnspacing=0.1,
                           handletextpad=0.1, fontsize=9)
        # Creates a RGB image of the galaxy, and sets the colours of the spaxels to match the
        # classification masks
        gal_rgb = np.zeros((ha.shape[0], ha.shape[1], 3), dtype=np.uint8)
        for ii in [1, 2]:  # Cyan
            gal_rgb[:, :, ii][sf_mask] = 255
        gal_rgb[:, :, 1][comp_mask] = 128  # Green
        gal_rgb[:, :, 0][seyfert_mask] = 255  # Red
        # Magenta
        gal_rgb[:, :, 0][liner_mask] = 255
        gal_rgb[:, :, 2][liner_mask] = 255
        for ii in [0, 1, 2]:
            gal_rgb[:, :, ii][invalid_mask] = 255  # White
            gal_rgb[:, :, ii][ambiguous_mask] = 169  # Grey
        # Shows the image.
        gal_bpt.imshow(gal_rgb, origin='lower', aspect='auto', interpolation='nearest')
        gal_bpt.set_xlim(0, ha.shape[1] - 1)
        gal_bpt.set_ylim(0, ha.shape[0] - 1)
        gal_bpt.set_xlabel('x [spaxels]')
        gal_bpt.set_ylabel('y [spaxels]')
    axes = grid_bpt.axes_all + [gal_bpt]
    # Adds custom method to create figure
    for ax in axes:
        setattr(ax.__class__, 'bind_to_figure', _bind_to_figure)
    return (bpt_return_classification, fig, axes)
def _bind_to_figure(self, fig=None):
    """Copies axes to a new figure.

    Wraps ``marvin.utils.plot.utils.bind_to_figure`` and restores the
    y-axis label when the copy comes back without one.
    """
    copied = bind_to_figure(self, fig=fig)
    first_ax = copied.axes[0]
    if first_ax.get_ylabel() == '':
        first_ax.set_ylabel('log([OIII]/H$\\beta$)')
    return copied
| [
"marvin.utils.plot.bind_to_figure"
] | [((1783, 1810), 'matplotlib.pyplot.figure', 'plt.figure', (['None', '(8.5, 10)'], {}), '(None, (8.5, 10))\n', (1793, 1810), True, 'import matplotlib.pyplot as plt\n'), ((1830, 1885), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'top': '(0.99)', 'bottom': '(0.08)', 'hspace': '(0.01)'}), '(top=0.99, bottom=0.08, hspace=0.01)\n', (1849, 1885), True, 'import matplotlib.pyplot as plt\n'), ((2048, 2195), 'mpl_toolkits.axes_grid1.ImageGrid', 'ImageGrid', (['fig', '(211)'], {'nrows_ncols': '((1, 3) if use_oi else (1, 2))', 'direction': '"""row"""', 'axes_pad': '(0.1)', 'label_mode': '"""L"""', 'share_all': '(False)'}), "(fig, 211, nrows_ncols=(1, 3) if use_oi else (1, 2), direction=\n 'row', axes_pad=0.1, label_mode='L', share_all=False, **imgrid_kwargs)\n", (2057, 2195), False, 'from mpl_toolkits.axes_grid1 import ImageGrid\n'), ((2369, 2408), 'mpl_toolkits.axes_grid1.ImageGrid', 'ImageGrid', (['fig', '(212)'], {'nrows_ncols': '(1, 1)'}), '(fig, 212, nrows_ncols=(1, 1))\n', (2378, 2408), False, 'from mpl_toolkits.axes_grid1 import ImageGrid\n'), ((2675, 2698), 'numpy.array', 'np.array', (['[-0.308, 1.0]'], {}), '([-0.308, 1.0])\n', (2683, 2698), True, 'import numpy as np\n'), ((2715, 2737), 'numpy.array', 'np.array', (['[-1.12, 0.5]'], {}), '([-1.12, 0.5])\n', (2723, 2737), True, 'import numpy as np\n'), ((11267, 11289), 'numpy.ma.log10', 'np.ma.log10', (['(oiii / hb)'], {}), '(oiii / hb)\n', (11278, 11289), True, 'import numpy as np\n'), ((11307, 11328), 'numpy.ma.log10', 'np.ma.log10', (['(nii / ha)'], {}), '(nii / ha)\n', (11318, 11328), True, 'import numpy as np\n'), ((11346, 11367), 'numpy.ma.log10', 'np.ma.log10', (['(sii / ha)'], {}), '(sii / ha)\n', (11357, 11367), True, 'import numpy as np\n'), ((11384, 11404), 'numpy.ma.log10', 'np.ma.log10', (['(oi / ha)'], {}), '(oi / ha)\n', (11395, 11404), True, 'import numpy as np\n'), ((18444, 18499), 'numpy.zeros', 'np.zeros', (['(ha.shape[0], ha.shape[1], 3)'], {'dtype': 'np.uint8'}), 
'((ha.shape[0], ha.shape[1], 3), dtype=np.uint8)\n', (18452, 18499), True, 'import numpy as np\n'), ((19571, 19600), 'marvin.utils.plot.bind_to_figure', 'bind_to_figure', (['self'], {'fig': 'fig'}), '(self, fig=fig)\n', (19585, 19600), False, 'from marvin.utils.plot import bind_to_figure\n'), ((10330, 10472), 'warnings.warn', 'warnings.warn', (['"""snr is deprecated. Use snr_min instead. snr will be removed in a future version of marvin"""', 'MarvinDeprecationWarning'], {}), "(\n 'snr is deprecated. Use snr_min instead. snr will be removed in a future version of marvin'\n , MarvinDeprecationWarning)\n", (10343, 10472), False, 'import warnings\n'), ((15550, 15587), 'matplotlib.pyplot.style.context', 'plt.style.context', (['"""seaborn-darkgrid"""'], {}), "('seaborn-darkgrid')\n", (15567, 15587), True, 'import matplotlib.pyplot as plt\n'), ((1978, 2007), 'packaging.version.parse', 'parse', (['matplotlib.__version__'], {}), '(matplotlib.__version__)\n', (1983, 2007), False, 'from packaging.version import parse\n'), ((2010, 2024), 'packaging.version.parse', 'parse', (['"""3.5.0"""'], {}), "('3.5.0')\n", (2015, 2024), False, 'from packaging.version import parse\n'), ((4349, 4411), 'numpy.arange', 'np.arange', (['xtick_limits[ii][0]', '(xtick_limits[ii][1] + 0.5)', '(0.5)'], {}), '(xtick_limits[ii][0], xtick_limits[ii][1] + 0.5, 0.5)\n', (4358, 4411), True, 'import numpy as np\n'), ((4445, 4507), 'numpy.arange', 'np.arange', (['xtick_limits[ii][0]', '(xtick_limits[ii][1] + 0.1)', '(0.1)'], {}), '(xtick_limits[ii][0], xtick_limits[ii][1] + 0.1, 0.1)\n', (4454, 4507), True, 'import numpy as np\n'), ((4595, 4620), 'numpy.arange', 'np.arange', (['(-1.5)', '(2.0)', '(0.5)'], {}), '(-1.5, 2.0, 0.5)\n', (4604, 4620), True, 'import numpy as np\n'), ((4654, 4679), 'numpy.arange', 'np.arange', (['(-1.5)', '(1.6)', '(0.1)'], {}), '(-1.5, 1.6, 0.1)\n', (4663, 4679), True, 'import numpy as np\n')] |
# Imports
from time import sleep as wait # import sleep as wait for timer
from marvin.essentials import speak # speak function
from word2number import w2n
##############################
# File containing Timer code #
##############################
class TimerService():
    """Simple countdown timer service.

    Parses a user-supplied time specification such as ``"five minutes"`` or
    ``"30 sec"``, validates it, sleeps for the requested duration, and
    announces completion through ``speak``.

    Attributes:
        time_for: the raw user-supplied time specification string.
        speak_type: the output mode forwarded to every ``speak`` call.
        bob: validity flag -- 1 while the request looks usable, 0 once it
            has been rejected (empty input or a zero-length timer).
    """
    def __init__(self, time_for, speak_type):
        self.speak_type = speak_type
        self.time_for = time_for
        # Flag empty input as invalid directly; the original raised and
        # immediately caught a bare Exception just to set this flag.
        self.bob = 0 if time_for == '' else 1

    def timerLogic(self):
        """Parse ``self.time_for`` into value and unit, then start the timer."""
        time_for_timer = self.time_for.split(" ")[0]
        # a zero-length timer makes no sense; veto it up front
        if time_for_timer.lower() == 'zero' or time_for_timer == '0':
            self.bob = 0
            # pass speak_type for consistency with every other speak() call
            speak('You can\'t have a timer for 0 time', self.speak_type)
        if self.bob >= 1:
            # Only `speak` is imported at module level, so the original bare
            # `marvin.essentials.splitJoin(...)` reference raised NameError.
            from marvin.essentials import splitJoin
            time_unit = splitJoin(self.time_for, 1)
            if time_unit == '':
                time_unit = 'minutes'
            try:
                self.time_for_timer = float(time_for_timer)
            except ValueError:
                # spelled-out numbers ("five") go through word2number
                self.time_for_timer = float(w2n.word_to_num(str(time_for_timer)))
            abs_time = abs(float(self.time_for_timer))
            if 'min' in time_unit:
                self.timer(abs_time * 60)
            elif 'sec' in time_unit:
                if abs_time >= 5.0:
                    self.timer(float(abs_time))
                else:
                    speak('Any timer less than 5 seconds is to small count thousands', self.speak_type)
            elif 'hr' in time_unit:
                if abs_time <= 3:
                    self.timer(abs_time * 3600)
                else:
                    speak('Timer does not support reminders over 3 hours use a calander reminder for long reminders', self.speak_type)
            elif 'day' in time_unit:
                speak('Timer does not support days use a calander reminder for long reminders', self.speak_type)
            else:
                speak('I couldn\'t find the time unit you wanted to use', self.speak_type)
        else:
            speak('You need to input a number for the timer', self.speak_type)

    def timer(self, delay):
        """Sleep for ``delay`` seconds, then announce that the timer is done."""
        print('Timer Started')
        # `sleep` is imported at module level as `wait`; the original called
        # the undefined name `time.sleep`, which raised NameError.
        wait(float(delay))
        speak('Timer Done!', self.speak_type)
"marvin.essentials.speak"
] | [((2512, 2549), 'marvin.essentials.speak', 'speak', (['"""Timer Done!"""', 'self.speak_type'], {}), "('Timer Done!', self.speak_type)\n", (2517, 2549), False, 'from marvin.essentials import speak\n'), ((725, 767), 'marvin.essentials.speak', 'speak', (['"""You can\'t have a timer for 0 time"""'], {}), '("You can\'t have a timer for 0 time")\n', (730, 767), False, 'from marvin.essentials import speak\n'), ((2344, 2410), 'marvin.essentials.speak', 'speak', (['"""You need to input a number for the timer"""', 'self.speak_type'], {}), "('You need to input a number for the timer', self.speak_type)\n", (2349, 2410), False, 'from marvin.essentials import speak\n'), ((1585, 1673), 'marvin.essentials.speak', 'speak', (['"""Any timer less than 5 seconds is to small count thousands"""', 'self.speak_type'], {}), "('Any timer less than 5 seconds is to small count thousands', self.\n speak_type)\n", (1590, 1673), False, 'from marvin.essentials import speak\n'), ((1943, 2067), 'marvin.essentials.speak', 'speak', (['"""Timer does not support reminders over 3 hours use a calander reminder for long reminders"""', 'self.speak_type'], {}), "(\n 'Timer does not support reminders over 3 hours use a calander reminder for long reminders'\n , self.speak_type)\n", (1948, 2067), False, 'from marvin.essentials import speak\n'), ((2112, 2212), 'marvin.essentials.speak', 'speak', (['"""Timer does not support days use a calander reminder for long reminders"""', 'self.speak_type'], {}), "('Timer does not support days use a calander reminder for long reminders',\n self.speak_type)\n", (2117, 2212), False, 'from marvin.essentials import speak\n'), ((2243, 2316), 'marvin.essentials.speak', 'speak', (['"""I couldn\'t find the time unit you wanted to use"""', 'self.speak_type'], {}), '("I couldn\'t find the time unit you wanted to use", self.speak_type)\n', (2248, 2316), False, 'from marvin.essentials import speak\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: <NAME>, <NAME>, and <NAME>
# @Date: 2017-03-20
# @Filename: conftest.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
#
# @Last modified by: <NAME>
# @Last modified time: 2018-07-21 21:51:06
import copy
import itertools
import os
import warnings
from collections import OrderedDict
import pytest
import yaml
from brain import bconfig
from flask_jwt_extended import tokens
from sdss_access.path import Path
from marvin import config, marvindb
from marvin.api.api import Interaction
from marvin.tools.cube import Cube
from marvin.tools.maps import Maps
from marvin.tools.modelcube import ModelCube
from marvin.tools.query import Query
from marvin.utils.datamodel.dap import datamodel
from marvin.utils.general import check_versions
from brain.utils.general import get_yaml_loader
warnings.simplefilter('always')
# PYTEST MODIFIERS
# -----------------
def pytest_addoption(parser):
    """Register the custom command-line flags used by this test suite."""
    custom_flags = (
        ('--runslow', 'Run slow tests.'),            # opt in to slow-marked tests
        ('--travis-only', 'Run a Travis only subset'),  # reduced CI parameter space
    )
    for flag, helptext in custom_flags:
        parser.addoption(flag, action='store_true', default=False, help=helptext)
def pytest_runtest_setup(item):
    """Skip any test marked ``slow`` unless --runslow was passed."""
    marked_slow = 'slow' in item.keywords
    runslow_requested = item.config.getoption('--runslow')
    if marked_slow and not runslow_requested:
        pytest.skip('Requires --runslow option to run.')
def pytest_configure(config):
    ''' Runs during conftest configuration.  Enables the reduced Travis
    parameter subset when --travis-only was passed on the command line.

    Note: the ``config`` parameter name is fixed by the pytest hook API
    (it shadows the marvin ``config`` import inside this function).
    '''
    global travis
    if config.getoption('--travis-only'):
        travis = TravisSubset()


# module-level holder for the Travis subset instance (None = run everything)
travis = None
class TravisSubset(object):
    """Reduced parameter space used for quick Travis CI runs."""

    def __init__(self):
        subset = {
            'new_gals': ['8485-1901'],
            'new_releases': ['MPL-6'],
            'new_bintypes': ['SPX', 'HYB10'],
            'new_templates': ['GAU-MILESHC'],
            'new_dbs': ['nodb'],
            'new_origins': ['file', 'api'],
            'new_modes': ['local', 'remote', 'auto'],
        }
        for attr, values in subset.items():
            setattr(self, attr, values)
# Global Parameters for FIXTURES
# ------------------------------
#releases = ['MPL-6', 'MPL-5', 'MPL-4']  # to loop over releases (see release fixture)
releases = ['MPL-8']
# Per-release whitelist of bintypes to parametrize over; releases absent from
# this dict accept any bintype (see populate_bintypes_templates).
bintypes_accepted = {'MPL-4': ['NONE', 'VOR10'],
                     'MPL-5': ['SPX', 'VOR10'],
                     'MPL-6': ['SPX', 'HYB10'],
                     'MPL-7': ['HYB10', 'VOR10'],
                     'MPL-8': ['HYB10', 'SPX']}
# Per-release whitelist of stellar templates, same contract as above.
templates_accepted = {'MPL-4': ['MIUSCAT_THIN', 'MILES_THIN'],
                      'MPL-5': ['GAU-MILESHC'],
                      'MPL-6': ['GAU-MILESHC'],
                      'MPL-7': ['GAU-MILESHC'],
                      'MPL-8': ['MILESHC-MILESHC']}
def populate_bintypes_templates(releases):
    ''' Generates bintype and template dictionaries for each release '''
    bintypes = OrderedDict((rel, []) for rel in releases)
    templates = OrderedDict((rel, []) for rel in releases)
    for rel in releases:
        for bintemp in datamodel[rel].get_bintemps():
            # a bintemp looks like 'BINTYPE-TEMPLATE[-MORE]'
            bintype, _, template = bintemp.partition('-')
            # honour the per-release whitelists when they exist
            if rel in bintypes_accepted and bintype not in bintypes_accepted[rel]:
                continue
            if rel in templates_accepted and template not in templates_accepted[rel]:
                continue
            if bintype not in bintypes[rel]:
                bintypes[rel].append(bintype)
            if template not in templates[rel]:
                templates[rel].append(template)
    return bintypes, templates
# Build the per-release parameter space once at import time.
bintypes, templates = populate_bintypes_templates(releases)
# TODO reduce modes to only local and remote
modes = ['local', 'remote', 'auto']  # to loop over modes (see mode fixture)
dbs = ['db', 'nodb']  # to loop over dbs (see db fixture)
origins = ['file', 'db', 'api']  # to loop over data origins (see data_origin fixture)
# Galaxy and Query data is stored in a YAML file
with open(os.path.join(os.path.dirname(__file__), 'data/galaxy_test_data.dat')) as f:
    galaxy_data = yaml.load(f, Loader=get_yaml_loader())
with open(os.path.join(os.path.dirname(__file__), 'data/query_test_data.dat')) as f:
    query_data = yaml.load(f, Loader=get_yaml_loader())
@pytest.fixture(scope='session', params=releases)
def release(request):
    """Yield a release."""
    requested = request.param
    # honour the reduced Travis subset when --travis-only is active
    if travis and requested not in travis.new_releases:
        pytest.skip('Skipping non-requested release')
    return requested
def _get_release_generator_chain():
    """Return all valid combinations of (release, bintype, template)."""
    per_release = (itertools.product([rel], bintypes[rel], templates[rel])
                   for rel in releases)
    return itertools.chain.from_iterable(per_release)
def _params_ids(fixture_value):
"""Return a test id for the release chain."""
return '-'.join(fixture_value)
@pytest.fixture(scope='session', params=sorted(_get_release_generator_chain()), ids=_params_ids)
def get_params(request):
    """Yield a tuple of (release, bintype, template)."""
    # placeholder until the marvin_test_if decorator works in 2.7
    release, bintype, template = request.param
    if travis:
        checks = ((release, travis.new_releases, 'Skipping non-requested release'),
                  (bintype, travis.new_bintypes, 'Skipping non-requested bintype'),
                  (template, travis.new_templates, 'Skipping non-requested template'))
        for value, allowed, message in checks:
            if value not in allowed:
                pytest.skip(message)
    return request.param
@pytest.fixture(scope='session', params=sorted(galaxy_data.keys()))
def plateifu(request):
    """Yield a plate-ifu."""
    pifu = request.param
    if travis and pifu not in travis.new_gals:
        pytest.skip('Skipping non-requested galaxies')
    return pifu
@pytest.fixture(scope='session', params=origins)
def data_origin(request):
    """Yield a data access mode."""
    origin = request.param
    if travis and origin not in travis.new_origins:
        pytest.skip('Skipping non-requested origins')
    return origin
@pytest.fixture(scope='session', params=modes)
def mode(request):
    """Yield a data mode."""
    data_mode = request.param
    if travis and data_mode not in travis.new_modes:
        pytest.skip('Skipping non-requested modes')
    return data_mode
# Config-based FIXTURES
# ----------------------
@pytest.fixture(scope='session', autouse=True)
def set_config():
    """Set config."""
    # Disable Sentry reporting and the GitHub-issue prompt for the whole
    # session, and clear any stored traceback state.
    config.use_sentry = False
    config.add_github_message = False
    config._traceback = None
@pytest.fixture()
def check_config():
    """Check the config to see if a db is on."""
    db_handle = config.db
    return db_handle is None
URLMAP = None
def set_sasurl(loc='local', port=None):
    """Set the sasurl to local or test-utah, and regenerate the urlmap."""
    global URLMAP
    if not port:
        port = int(os.environ.get('LOCAL_MARVIN_PORT', 5000))
    config.switchSasUrl(loc, test=(loc == 'utah'), port=port)
    # fetch the route map only once per process and cache it
    if not URLMAP:
        resp = Interaction('/marvin/api/general/getroutemap', request_type='get', auth='netrc')
        config.urlmap = resp.getRouteMap()
        URLMAP = config.urlmap
@pytest.fixture(scope='session', autouse=True)
def saslocal():
    """Set sasurl to local."""
    # autouse: every test session talks to the local API server
    set_sasurl(loc='local')
@pytest.fixture(scope='session')
def urlmap(saslocal):
    """Yield the config urlmap."""
    # depends on saslocal so the route map has been generated first
    return config.urlmap
@pytest.fixture(scope='session')
def set_release(release):
    """Set the release in the config."""
    # NOTE: do not combine with the galaxy fixture -- see set_the_config below
    config.setMPL(release)
@pytest.fixture(scope='session')
def versions(release):
    """Yield the DRP and DAP versions for a release."""
    drp_version, dap_version = config.lookUpVersions(release)
    return drp_version, dap_version
@pytest.fixture(scope='session')
def drpver(versions):
    """Return DRP version."""
    drp_version, _ = versions
    return drp_version
@pytest.fixture(scope='session')
def dapver(versions):
    """Return DAP version."""
    _, dap_version = versions
    return dap_version
def set_the_config(release):
    """Set config release without parametrizing.

    Using ``set_release`` combined with ``galaxy`` double parametrizes!"""
    # Order matters here: access mode must be set before the release,
    # and login happens after the sasurl points at the local server.
    config.access = 'collab'
    config.setRelease(release)
    set_sasurl(loc='local')
    config.login()
    config._traceback = None
def custom_login():
    # Monkeypatched in place of config.login (see monkeyauth): issues a JWT
    # access token for the 'test' identity signed with MARVIN_SECRET.
    # NOTE(review): the flask_jwt_extended arguments are positional and
    # version-sensitive -- confirm against the pinned library version.
    config.token = tokens.encode_access_token('test', os.environ.get('MARVIN_SECRET'), 'HS256', False, True, 'user_claims', True, 'identity', 'user_claims')
def custom_auth(self, authtype=None):
    # Monkeypatched over Interaction.setAuth (see monkeyauth): forces
    # token-based auth regardless of the requested authtype.  Works as an
    # unbound function because it is installed on the Interaction class.
    authtype = 'token'
    super(Interaction, self).setAuth(authtype=authtype)
# DB-based FIXTURES
# -----------------
class DB(object):
    """Object representing aspects of the marvin db.

    Useful for tests needing direct DB access.
    """

    def __init__(self):
        """Grab handles to the shared marvindb object and its sub-databases."""
        self._marvindb = marvindb
        for sub in ('session', 'datadb', 'sampledb', 'dapdb'):
            setattr(self, sub, getattr(marvindb, sub))
@pytest.fixture(scope='session')
def maindb():
    """Yield an instance of the DB object."""
    # session-scoped: one shared handle to marvindb for all tests
    yield DB()
@pytest.fixture(scope='function')
def db_off():
    """Turn the DB off for a test, and reset it after."""
    config.forceDbOff()
    yield
    # teardown: restore db access for subsequent tests
    config.forceDbOn()
@pytest.fixture(autouse=True)
def db_on():
    """Automatically turn on the DB at collection time."""
    # autouse ensures every test starts with the local db enabled
    config.forceDbOn()
@pytest.fixture()
def usedb(request):
    ''' fixture for optional turning off the db '''
    toggle = config.forceDbOn if request.param else config.forceDbOff
    toggle()
    return config.db is not None
@pytest.fixture(params=dbs)
def db(request):
    """Turn local db on or off.

    Use this to parametrize over all db options.
    """
    if travis and request.param not in travis.new_dbs:
        pytest.skip('Skipping non-requested dbs')
    toggle = config.forceDbOn if request.param == 'db' else config.forceDbOff
    toggle()
    yield config.db is not None
    config.forceDbOn()  # teardown: always restore the db
@pytest.fixture()
def exporigin(mode, db):
    """Return the expected origins for a given db/mode combo."""
    # remote mode always goes through the api, regardless of the db
    lookup = {('local', False): 'file',
              ('local', True): 'db',
              ('remote', False): 'api',
              ('remote', True): 'api',
              ('auto', True): 'db',
              ('auto', False): 'file'}
    return lookup.get((mode, bool(db)))
@pytest.fixture()
def expmode(mode, db):
    ''' expected modes for a given db/mode combo '''
    # local mode without a db has no valid expectation (None)
    lookup = {('local', False): None,
              ('local', True): 'local',
              ('remote', False): 'remote',
              ('remote', True): 'remote',
              ('auto', True): 'local',
              ('auto', False): 'remote'}
    return lookup.get((mode, bool(db)))
@pytest.fixture()
def user(maindb):
    """Yield the 'test' user row, creating it when absent.

    BUGFIX: the original fixture deleted the user at teardown even when the
    row already existed in the shared database, destroying pre-existing data.
    The row is now only removed when this fixture created it.
    """
    username = 'test'
    password = '<PASSWORD>'
    model = maindb.datadb.User
    user = maindb.session.query(model).filter(model.username == username).one_or_none()
    created = user is None
    if created:
        user = model(username=username, login_count=1)
        user.set_password(password)
        maindb.session.add(user)
    yield user
    if created:
        maindb.session.delete(user)
# Monkeypatch-based FIXTURES
# --------------------------
@pytest.fixture()
def monkeyconfig(request, monkeypatch):
    """Monkeypatch a variable on the Marvin config.

    Example at line 160 in utils/test_general.
    """
    # request.param is an (attribute-name, value) pair supplied by the test
    name, value = request.param
    monkeypatch.setattr(config, name, value=value)
@pytest.fixture()
def monkeymanga(monkeypatch, temp_scratch):
    """Monkeypatch the environ to create a temp SAS dir for reading/writing/downloading.

    Example at line 141 in utils/test_images.
    """
    # Redirect the entire SAS tree into the pytest scratch dir so tests never
    # touch a real SAS installation.
    monkeypatch.setitem(os.environ, 'SAS_BASE_DIR', str(temp_scratch))
    monkeypatch.setitem(os.environ, 'MANGA_SPECTRO_REDUX',
                        str(temp_scratch.join('mangawork/manga/spectro/redux')))
    monkeypatch.setitem(os.environ, 'MANGA_SPECTRO_ANALYSIS',
                        str(temp_scratch.join('mangawork/manga/spectro/analysis')))
@pytest.fixture()
def monkeyauth(monkeypatch):
    """Replace real authentication with the token-based test stubs."""
    monkeypatch.setattr(config, 'login', custom_login)
    monkeypatch.setattr(Interaction, 'setAuth', custom_auth)
    # point both public and collab API urls at the configured sasurl
    monkeypatch.setattr(bconfig, '_public_api_url', config.sasurl)
    monkeypatch.setattr(bconfig, '_collab_api_url', config.sasurl)
# Temp Dir/File-based FIXTURES
# ----------------------------
@pytest.fixture(scope='function')
def temp_scratch(tmpdir_factory):
    """Create a temporary scratch space for reading/writing.

    Use for creating temp dirs and files.

    Example at line 208 in tools/test_query, line 254 in tools/test_results, and
    misc/test_marvin_pickle.
    """
    # function-scoped: each test gets a fresh py.path.local directory
    fn = tmpdir_factory.mktemp('scratch')
    return fn
def tempafile(path, temp_scratch):
    """Return a pytest temporary file given the original file path.

    Example at line 141 in utils/test_images.
    """
    redux_root = os.getenv('MANGA_SPECTRO_REDUX')
    analysis_root = os.getenv('MANGA_SPECTRO_ANALYSIS')
    # whichever SAS root matches the path yields a non-empty remainder
    relative = path.partition(redux_root)[-1] or path.partition(analysis_root)[-1]
    return temp_scratch.join(relative)
# Object-based FIXTURES
# ---------------------
class Galaxy(object):
    """An example galaxy for Marvin-tools testing.

    Holds the expected galaxy properties (loaded from galaxy_test_data.dat)
    and builds the sdss_access file paths for cubes, maps, etc.
    """
    # SAS roots read from the environment; None if unset
    sasbasedir = os.getenv('SAS_BASE_DIR')
    mangaredux = os.getenv('MANGA_SPECTRO_REDUX')
    mangaanalysis = os.getenv('MANGA_SPECTRO_ANALYSIS')
    dir3d = 'stack'
    def __init__(self, plateifu):
        """Initialize plate and ifu."""
        self.plateifu = plateifu
        # 'PLATE-IFU' -> integer plate, string ifu
        self.plate, self.ifu = self.plateifu.split('-')
        self.plate = int(self.plate)
    def set_galaxy_data(self, data_origin=None):
        """Set galaxy properties from the configuration file."""
        if self.plateifu not in galaxy_data:
            return
        # deepcopy so per-test mutation cannot corrupt the shared yaml data
        data = copy.deepcopy(galaxy_data[self.plateifu])
        for key in data.keys():
            setattr(self, key, data[key])
        # sets specfic data per release
        releasedata = self.releasedata[self.release]
        for key in releasedata.keys():
            setattr(self, key, releasedata[key])
        # remap NSA drpall names for MPL-4 vs 5+
        drpcopy = self.nsa_data['drpall'].copy()
        for key, val in self.nsa_data['drpall'].items():
            # a list value encodes a (newval, newkey) rename pair
            if isinstance(val, list):
                newval, newkey = drpcopy.pop(key)
                if self.release == 'MPL-4':
                    drpcopy[newkey] = newval
                else:
                    drpcopy[key] = newval
        self.nsa_data['drpall'] = drpcopy
    def set_params(self, bintype=None, template=None, release=None):
        """Set bintype, template, etc."""
        self.release = release
        self.drpver, self.dapver = config.lookUpVersions(self.release)
        self.drpall = 'drpall-{0}.fits'.format(self.drpver)
        self.bintype = datamodel[self.dapver].get_bintype(bintype)
        self.template = datamodel[self.dapver].get_template(template)
        self.bintemp = '{0}-{1}'.format(self.bintype.name, self.template.name)
        # MPL-4 file names carry a numeric iteration; later MPLs use a glob
        if release == 'MPL-4':
            self.niter = int('{0}{1}'.format(self.template.n, self.bintype.n))
        else:
            self.niter = '*'
        # keyword set consumed by the sdss_access path templates below
        self.access_kwargs = {'plate': self.plate, 'ifu': self.ifu, 'drpver': self.drpver,
                              'dapver': self.dapver, 'dir3d': self.dir3d, 'mpl': self.release,
                              'bintype': self.bintype.name, 'n': self.niter, 'mode': '*',
                              'daptype': self.bintemp}
    def set_filepaths(self, pathtype='full'):
        """Set the paths for cube, maps, etc."""
        self.path = Path()
        # DRP >= v2_5_3 moved the preview images to a new path template
        if check_versions(self.drpver, 'v2_5_3'):
            self.imgpath = self.path.__getattribute__(pathtype)('mangaimagenew', **self.access_kwargs)
        else:
            self.imgpath = self.path.__getattribute__(pathtype)('mangaimage', **self.access_kwargs)
        self.cubepath = self.path.__getattribute__(pathtype)('mangacube', **self.access_kwargs)
        self.rsspath = self.path.__getattribute__(pathtype)('mangarss', **self.access_kwargs)
        if self.release == 'MPL-4':
            # MPL-4 has maps only; no model cubes
            self.mapspath = self.path.__getattribute__(pathtype)('mangamap', **self.access_kwargs)
            self.modelpath = None
        else:
            # later releases pass mode explicitly, so drop the '*' entry
            self.access_kwargs.pop('mode')
            self.mapspath = self.path.__getattribute__(pathtype)('mangadap5', mode='MAPS',
                                                                 **self.access_kwargs)
            self.modelpath = self.path.__getattribute__(pathtype)('mangadap5', mode='LOGCUBE',
                                                                  **self.access_kwargs)
    def get_location(self, path):
        """Extract the location from the input path."""
        return self.path.location("", full=path)
    def partition_path(self, path):
        """Partition the path into non-redux/analysis parts."""
        endredux = path.partition(self.mangaredux)[-1]
        endanalysis = path.partition(self.mangaanalysis)[-1]
        end = (endredux or endanalysis)
        return end
    def new_path(self, name, newvar):
        ''' Sets a new path with the subsituted name '''
        access_copy = self.access_kwargs.copy()
        access_copy['mode'] = '*'
        access_copy.update(**newvar)
        if name == 'maps':
            access_copy['mode'] = 'MAPS'
            name = 'mangamap' if self.release == 'MPL-4' else 'mangadap5'
        elif name == 'modelcube':
            access_copy['mode'] = 'LOGCUBE'
            # MPL-4 has no model cubes, so no path can be built
            name = None if self.release == 'MPL-4' else 'mangadap5'
        path = self.path.full(name, **access_copy) if name else None
        return path
@pytest.fixture(scope='function')
def galaxy(monkeyauth, get_params, plateifu):
    """Yield an instance of a Galaxy object for use in tests."""
    release, bintype, template = get_params
    set_the_config(release)
    instance = Galaxy(plateifu=plateifu)
    instance.set_params(bintype=bintype, template=template, release=release)
    instance.set_filepaths()
    instance.set_galaxy_data()
    yield instance
    instance = None  # teardown: drop the reference
@pytest.fixture(scope='function')
def cube(galaxy, exporigin, mode):
    ''' Yield a Marvin Cube based on the expected origin combo of (mode+db).

    Fixture tests 6 cube origins from (mode+db) combos [file, db and api]
    '''
    if str(galaxy.bintype) != 'SPX':
        pytest.skip()
    kwargs = {'release': galaxy.release, 'mode': mode}
    if exporigin == 'file':
        kwargs['filename'] = galaxy.cubepath
    else:
        kwargs['plateifu'] = galaxy.plateifu
    c = Cube(**kwargs)
    c.exporigin = exporigin
    c.initial_mode = mode
    yield c
    c = None
@pytest.fixture(scope='function')
def modelcube(galaxy, exporigin, mode):
    ''' Yield a Marvin ModelCube based on the expected origin combo of (mode+db).

    Fixture tests 6 modelcube origins from (mode+db) combos [file, db and api]
    '''
    kwargs = {'release': galaxy.release, 'mode': mode, 'bintype': galaxy.bintype}
    if exporigin == 'file':
        kwargs['filename'] = galaxy.modelpath
    else:
        kwargs['plateifu'] = galaxy.plateifu
    mc = ModelCube(**kwargs)
    mc.exporigin = exporigin
    mc.initial_mode = mode
    yield mc
    mc = None
@pytest.fixture(scope='function')
def maps(galaxy, exporigin, mode):
    ''' Yield a Marvin Maps based on the expected origin combo of (mode+db).

    Fixture tests 6 cube origins from (mode+db) combos [file, db and api]
    '''
    kwargs = {'release': galaxy.release, 'mode': mode, 'bintype': galaxy.bintype}
    if exporigin == 'file':
        kwargs['filename'] = galaxy.mapspath
    else:
        kwargs['plateifu'] = galaxy.plateifu
    m = Maps(**kwargs)
    m.exporigin = exporigin
    yield m
    m = None
# NOTE(review): these three lines duplicate the identical module-level
# definitions earlier in this file -- candidate for removal.
modes = ['local', 'remote', 'auto']  # to loop over modes (see mode fixture)
dbs = ['db', 'nodb']  # to loop over dbs (see db fixture)
origins = ['file', 'db', 'api']  # to loop over data origins (see data_origin fixture)
@pytest.fixture(scope='class')
def maps_release_only(release):
    """Return a Maps object for 8485-1901, parametrized only over release."""
    return Maps(plateifu='8485-1901', release=release)
@pytest.fixture(scope='function')
def query(request, allow_dap, monkeyauth, release, mode, db):
    ''' Yields a Query that loops over all modes and db options '''
    expdata = query_data[release]
    set_the_config(release)
    if mode == 'local' and not db:
        pytest.skip('cannot use queries in local mode without a db')
    # tests may supply a search filter via indirect parametrization
    searchfilter = getattr(request, 'param', None)
    q = Query(search_filter=searchfilter, mode=mode, release=release)
    q.expdata = expdata
    if q.mode == 'remote':
        pytest.xfail('cannot control for DAP spaxel queries on server side; failing all remotes until then')
    yield q
    config.forceDbOn()  # teardown: restore db access
    q = None
# @pytest.fixture(autouse=True)
# def skipall():
# pytest.skip('skipping everything')
| [
"marvin.config.login",
"marvin.config.forceDbOn",
"marvin.config.forceDbOff",
"marvin.config.setMPL",
"marvin.tools.modelcube.ModelCube",
"marvin.tools.maps.Maps",
"marvin.config.getoption",
"marvin.api.api.Interaction",
"marvin.tools.query.Query",
"marvin.utils.general.check_versions",
"marvin.... | [((872, 903), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (893, 903), False, 'import warnings\n'), ((4462, 4510), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'params': 'releases'}), "(scope='session', params=releases)\n", (4476, 4510), False, 'import pytest\n'), ((6011, 6058), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'params': 'origins'}), "(scope='session', params=origins)\n", (6025, 6058), False, 'import pytest\n'), ((6262, 6307), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'params': 'modes'}), "(scope='session', params=modes)\n", (6276, 6307), False, 'import pytest\n'), ((6542, 6587), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'autouse': '(True)'}), "(scope='session', autouse=True)\n", (6556, 6587), False, 'import pytest\n'), ((6728, 6744), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (6742, 6744), False, 'import pytest\n'), ((7371, 7416), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'autouse': '(True)'}), "(scope='session', autouse=True)\n", (7385, 7416), False, 'import pytest\n'), ((7495, 7526), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (7509, 7526), False, 'import pytest\n'), ((7612, 7643), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (7626, 7643), False, 'import pytest\n'), ((7741, 7772), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (7755, 7772), False, 'import pytest\n'), ((7933, 7964), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (7947, 7964), False, 'import pytest\n'), ((8064, 8095), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (8078, 8095), False, 'import pytest\n'), ((9205, 9236), 'pytest.fixture', 'pytest.fixture', ([], 
{'scope': '"""session"""'}), "(scope='session')\n", (9219, 9236), False, 'import pytest\n'), ((9315, 9347), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (9329, 9347), False, 'import pytest\n'), ((9480, 9508), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (9494, 9508), False, 'import pytest\n'), ((9607, 9623), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (9621, 9623), False, 'import pytest\n'), ((9819, 9845), 'pytest.fixture', 'pytest.fixture', ([], {'params': 'dbs'}), '(params=dbs)\n', (9833, 9845), False, 'import pytest\n'), ((10211, 10227), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (10225, 10227), False, 'import pytest\n'), ((10655, 10671), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (10669, 10671), False, 'import pytest\n'), ((11097, 11113), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (11111, 11113), False, 'import pytest\n'), ((11550, 11566), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (11564, 11566), False, 'import pytest\n'), ((11801, 11817), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (11815, 11817), False, 'import pytest\n'), ((12366, 12382), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (12380, 12382), False, 'import pytest\n'), ((12727, 12759), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (12741, 12759), False, 'import pytest\n'), ((18064, 18096), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (18078, 18096), False, 'import pytest\n'), ((18471, 18503), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (18485, 18503), False, 'import pytest\n'), ((19042, 19074), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (19056, 19074), False, 'import pytest\n'), ((19629, 19661), 'pytest.fixture', 'pytest.fixture', ([], 
{'scope': '"""function"""'}), "(scope='function')\n", (19643, 19661), False, 'import pytest\n'), ((20410, 20439), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""class"""'}), "(scope='class')\n", (20424, 20439), False, 'import pytest\n'), ((20530, 20562), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (20544, 20562), False, 'import pytest\n'), ((1661, 1694), 'marvin.config.getoption', 'config.getoption', (['"""--travis-only"""'], {}), "('--travis-only')\n", (1677, 1694), False, 'from marvin import config, marvindb\n'), ((2965, 3015), 'collections.OrderedDict', 'OrderedDict', (['((release, []) for release in releases)'], {}), '((release, []) for release in releases)\n', (2976, 3015), False, 'from collections import OrderedDict\n'), ((3032, 3082), 'collections.OrderedDict', 'OrderedDict', (['((release, []) for release in releases)'], {}), '((release, []) for release in releases)\n', (3043, 3082), False, 'from collections import OrderedDict\n'), ((7104, 7152), 'marvin.config.switchSasUrl', 'config.switchSasUrl', (['loc'], {'test': 'istest', 'port': 'port'}), '(loc, test=istest, port=port)\n', (7123, 7152), False, 'from marvin import config, marvindb\n'), ((7715, 7737), 'marvin.config.setMPL', 'config.setMPL', (['release'], {}), '(release)\n', (7728, 7737), False, 'from marvin import config, marvindb\n'), ((7873, 7903), 'marvin.config.lookUpVersions', 'config.lookUpVersions', (['release'], {}), '(release)\n', (7894, 7903), False, 'from marvin import config, marvindb\n'), ((8381, 8407), 'marvin.config.setRelease', 'config.setRelease', (['release'], {}), '(release)\n', (8398, 8407), False, 'from marvin import config, marvindb\n'), ((8440, 8454), 'marvin.config.login', 'config.login', ([], {}), '()\n', (8452, 8454), False, 'from marvin import config, marvindb\n'), ((9424, 9443), 'marvin.config.forceDbOff', 'config.forceDbOff', ([], {}), '()\n', (9441, 9443), False, 'from marvin import config, marvindb\n'), ((9458, 
9476), 'marvin.config.forceDbOn', 'config.forceDbOn', ([], {}), '()\n', (9474, 9476), False, 'from marvin import config, marvindb\n'), ((9585, 9603), 'marvin.config.forceDbOn', 'config.forceDbOn', ([], {}), '()\n', (9601, 9603), False, 'from marvin import config, marvindb\n'), ((10189, 10207), 'marvin.config.forceDbOn', 'config.forceDbOn', ([], {}), '()\n', (10205, 10207), False, 'from marvin import config, marvindb\n'), ((13245, 13277), 'os.getenv', 'os.getenv', (['"""MANGA_SPECTRO_REDUX"""'], {}), "('MANGA_SPECTRO_REDUX')\n", (13254, 13277), False, 'import os\n'), ((13289, 13324), 'os.getenv', 'os.getenv', (['"""MANGA_SPECTRO_ANALYSIS"""'], {}), "('MANGA_SPECTRO_ANALYSIS')\n", (13298, 13324), False, 'import os\n'), ((13617, 13642), 'os.getenv', 'os.getenv', (['"""SAS_BASE_DIR"""'], {}), "('SAS_BASE_DIR')\n", (13626, 13642), False, 'import os\n'), ((13660, 13692), 'os.getenv', 'os.getenv', (['"""MANGA_SPECTRO_REDUX"""'], {}), "('MANGA_SPECTRO_REDUX')\n", (13669, 13692), False, 'import os\n'), ((13713, 13748), 'os.getenv', 'os.getenv', (['"""MANGA_SPECTRO_ANALYSIS"""'], {}), "('MANGA_SPECTRO_ANALYSIS')\n", (13722, 13748), False, 'import os\n'), ((20483, 20526), 'marvin.tools.maps.Maps', 'Maps', ([], {'plateifu': '"""8485-1901"""', 'release': 'release'}), "(plateifu='8485-1901', release=release)\n", (20487, 20526), False, 'from marvin.tools.maps import Maps\n'), ((20936, 20997), 'marvin.tools.query.Query', 'Query', ([], {'search_filter': 'searchfilter', 'mode': 'mode', 'release': 'release'}), '(search_filter=searchfilter, mode=mode, release=release)\n', (20941, 20997), False, 'from marvin.tools.query import Query\n'), ((21171, 21189), 'marvin.config.forceDbOn', 'config.forceDbOn', ([], {}), '()\n', (21187, 21189), False, 'from marvin import config, marvindb\n'), ((1394, 1442), 'pytest.skip', 'pytest.skip', (['"""Requires --runslow option to run."""'], {}), "('Requires --runslow option to run.')\n", (1405, 1442), False, 'import pytest\n'), ((4628, 4673), 
'pytest.skip', 'pytest.skip', (['"""Skipping non-requested release"""'], {}), "('Skipping non-requested release')\n", (4639, 4673), False, 'import pytest\n'), ((5457, 5502), 'pytest.skip', 'pytest.skip', (['"""Skipping non-requested release"""'], {}), "('Skipping non-requested release')\n", (5468, 5502), False, 'import pytest\n'), ((5566, 5611), 'pytest.skip', 'pytest.skip', (['"""Skipping non-requested bintype"""'], {}), "('Skipping non-requested bintype')\n", (5577, 5611), False, 'import pytest\n'), ((5677, 5723), 'pytest.skip', 'pytest.skip', (['"""Skipping non-requested template"""'], {}), "('Skipping non-requested template')\n", (5688, 5723), False, 'import pytest\n'), ((5936, 5982), 'pytest.skip', 'pytest.skip', (['"""Skipping non-requested galaxies"""'], {}), "('Skipping non-requested galaxies')\n", (5947, 5982), False, 'import pytest\n'), ((6188, 6233), 'pytest.skip', 'pytest.skip', (['"""Skipping non-requested origins"""'], {}), "('Skipping non-requested origins')\n", (6199, 6233), False, 'import pytest\n'), ((6421, 6464), 'pytest.skip', 'pytest.skip', (['"""Skipping non-requested modes"""'], {}), "('Skipping non-requested modes')\n", (6432, 6464), False, 'import pytest\n'), ((7209, 7294), 'marvin.api.api.Interaction', 'Interaction', (['"""/marvin/api/general/getroutemap"""'], {'request_type': '"""get"""', 'auth': '"""netrc"""'}), "('/marvin/api/general/getroutemap', request_type='get', auth='netrc'\n )\n", (7220, 7294), False, 'from marvin.api.api import Interaction\n'), ((8560, 8591), 'os.environ.get', 'os.environ.get', (['"""MARVIN_SECRET"""'], {}), "('MARVIN_SECRET')\n", (8574, 8591), False, 'import os\n'), ((9726, 9744), 'marvin.config.forceDbOn', 'config.forceDbOn', ([], {}), '()\n', (9742, 9744), False, 'from marvin import config, marvindb\n'), ((9763, 9782), 'marvin.config.forceDbOff', 'config.forceDbOff', ([], {}), '()\n', (9780, 9782), False, 'from marvin import config, marvindb\n'), ((10016, 10057), 'pytest.skip', 'pytest.skip', (['"""Skipping 
non-requested dbs"""'], {}), "('Skipping non-requested dbs')\n", (10027, 10057), False, 'import pytest\n'), ((10096, 10114), 'marvin.config.forceDbOn', 'config.forceDbOn', ([], {}), '()\n', (10112, 10114), False, 'from marvin import config, marvindb\n'), ((10133, 10152), 'marvin.config.forceDbOff', 'config.forceDbOff', ([], {}), '()\n', (10150, 10152), False, 'from marvin import config, marvindb\n'), ((14166, 14207), 'copy.deepcopy', 'copy.deepcopy', (['galaxy_data[self.plateifu]'], {}), '(galaxy_data[self.plateifu])\n', (14179, 14207), False, 'import copy\n'), ((15084, 15119), 'marvin.config.lookUpVersions', 'config.lookUpVersions', (['self.release'], {}), '(self.release)\n', (15105, 15119), False, 'from marvin import config, marvindb\n'), ((15999, 16005), 'sdss_access.path.Path', 'Path', ([], {}), '()\n', (16003, 16005), False, 'from sdss_access.path import Path\n'), ((16017, 16054), 'marvin.utils.general.check_versions', 'check_versions', (['self.drpver', '"""v2_5_3"""'], {}), "(self.drpver, 'v2_5_3')\n", (16031, 16054), False, 'from marvin.utils.general import check_versions\n'), ((18748, 18761), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (18759, 18761), False, 'import pytest\n'), ((18803, 18868), 'marvin.tools.cube.Cube', 'Cube', ([], {'filename': 'galaxy.cubepath', 'release': 'galaxy.release', 'mode': 'mode'}), '(filename=galaxy.cubepath, release=galaxy.release, mode=mode)\n', (18807, 18868), False, 'from marvin.tools.cube import Cube\n'), ((18891, 18956), 'marvin.tools.cube.Cube', 'Cube', ([], {'plateifu': 'galaxy.plateifu', 'release': 'galaxy.release', 'mode': 'mode'}), '(plateifu=galaxy.plateifu, release=galaxy.release, mode=mode)\n', (18895, 18956), False, 'from marvin.tools.cube import Cube\n'), ((19329, 19428), 'marvin.tools.modelcube.ModelCube', 'ModelCube', ([], {'filename': 'galaxy.modelpath', 'release': 'galaxy.release', 'mode': 'mode', 'bintype': 'galaxy.bintype'}), '(filename=galaxy.modelpath, release=galaxy.release, mode=mode,\n 
bintype=galaxy.bintype)\n', (19338, 19428), False, 'from marvin.tools.modelcube import ModelCube\n'), ((19448, 19546), 'marvin.tools.modelcube.ModelCube', 'ModelCube', ([], {'plateifu': 'galaxy.plateifu', 'release': 'galaxy.release', 'mode': 'mode', 'bintype': 'galaxy.bintype'}), '(plateifu=galaxy.plateifu, release=galaxy.release, mode=mode,\n bintype=galaxy.bintype)\n', (19457, 19546), False, 'from marvin.tools.modelcube import ModelCube\n'), ((19900, 19994), 'marvin.tools.maps.Maps', 'Maps', ([], {'filename': 'galaxy.mapspath', 'release': 'galaxy.release', 'mode': 'mode', 'bintype': 'galaxy.bintype'}), '(filename=galaxy.mapspath, release=galaxy.release, mode=mode, bintype=\n galaxy.bintype)\n', (19904, 19994), False, 'from marvin.tools.maps import Maps\n'), ((20012, 20106), 'marvin.tools.maps.Maps', 'Maps', ([], {'plateifu': 'galaxy.plateifu', 'release': 'galaxy.release', 'mode': 'mode', 'bintype': 'galaxy.bintype'}), '(plateifu=galaxy.plateifu, release=galaxy.release, mode=mode, bintype=\n galaxy.bintype)\n', (20016, 20106), False, 'from marvin.tools.maps import Maps\n'), ((20795, 20855), 'pytest.skip', 'pytest.skip', (['"""cannot use queries in local mode without a db"""'], {}), "('cannot use queries in local mode without a db')\n", (20806, 20855), False, 'import pytest\n'), ((21054, 21164), 'pytest.xfail', 'pytest.xfail', (['"""cannot control for DAP spaxel queries on server side; failing all remotes until then"""'], {}), "(\n 'cannot control for DAP spaxel queries on server side; failing all remotes until then'\n )\n", (21066, 21164), False, 'import pytest\n'), ((4198, 4223), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4213, 4223), False, 'import os\n'), ((4299, 4316), 'brain.utils.general.get_yaml_loader', 'get_yaml_loader', ([], {}), '()\n', (4314, 4316), False, 'from brain.utils.general import get_yaml_loader\n'), ((4341, 4366), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4356, 4366), False, 
'import os\n'), ((4440, 4457), 'brain.utils.general.get_yaml_loader', 'get_yaml_loader', ([], {}), '()\n', (4455, 4457), False, 'from brain.utils.general import get_yaml_loader\n'), ((7011, 7052), 'os.environ.get', 'os.environ.get', (['"""LOCAL_MARVIN_PORT"""', '(5000)'], {}), "('LOCAL_MARVIN_PORT', 5000)\n", (7025, 7052), False, 'import os\n'), ((4840, 4907), 'itertools.product', 'itertools.product', (['[release]', 'bintypes[release]', 'templates[release]'], {}), '([release], bintypes[release], templates[release])\n', (4857, 4907), False, 'import itertools\n')] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#Import Local Modules
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.lib.base import (SecurityGroup,
Account)
from marvin.lib.common import (get_zone,
get_domain,
get_template)
from marvin.lib.utils import (validateList,
cleanup_resources)
from marvin.codes import (PASS, EMPTY_LIST)
from nose.plugins.attrib import attr
class TestSecurityGroups(cloudstackTestCase):
    """Component tests for Security Group operations performed as a regular
    (non-admin) user: listing with pagination, and authorizing/revoking
    ingress and egress rules.
    """

    @classmethod
    def setUpClass(cls):
        """Create the shared fixtures used by every test in this class.

        Creates a user account in the test domain, caches zone, domain,
        template and parsed test-data config, and builds a user-level API
        client for that account. Any failure triggers cleanup and re-raises.
        """
        try:
            cls._cleanup = []
            cls.testClient = super(TestSecurityGroups, cls).getClsTestClient()
            cls.api_client = cls.testClient.getApiClient()
            cls.services = cls.testClient.getParsedTestDataConfig()
            # Get Domain, Zone, Template
            cls.domain = get_domain(cls.api_client)
            cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
            cls.template = get_template(
                cls.api_client,
                cls.zone.id,
                cls.services["ostype"]
            )
            cls.services['mode'] = cls.zone.networktype
            cls.account = Account.create(
                cls.api_client,
                cls.services["account"],
                domainid=cls.domain.id
            )
            # Getting authentication for user in newly created Account
            cls.user = cls.account.user[0]
            cls.userapiclient = cls.testClient.getUserApiClient(cls.user.username, cls.domain.name)
            cls._cleanup.append(cls.account)
        except Exception as e:
            # Best-effort teardown of anything already created, then surface
            # the original failure so the suite reports a setup error.
            cls.tearDownClass()
            raise Exception("Warning: Exception in setup : %s" % e)
        return

    def setUp(self):
        """Per-test setup: fresh admin API client and an empty cleanup list."""
        self.apiClient = self.testClient.getApiClient()
        self.cleanup = []

    def tearDown(self):
        """Per-test teardown: delete resources registered in self.cleanup."""
        # Clean up, terminate the created resources
        cleanup_resources(self.apiClient, self.cleanup)
        return

    @classmethod
    def tearDownClass(cls):
        """Class-level teardown: delete the account (and anything else in
        cls._cleanup) using the admin API client.
        """
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def __verify_values(self, expected_vals, actual_vals):
        """
        @Desc: Function to verify expected and actual values
        @Steps:
        Step1: Initializing return flag to True
        Step1: Verifying length of expected and actual dictionaries is matching.
               If not matching returning false
        Step2: Listing all the keys from expected dictionary
        Step3: Looping through each key from step2 and verifying expected and actual dictionaries have same value
               If not making return flag to False
        Step4: returning the return flag after all the values are verified
        """
        return_flag = True
        if len(expected_vals) != len(actual_vals):
            return False
        # Iterate the expected keys directly instead of indexing into
        # dict.keys(); on Python 3 keys() returns a view object that is
        # not subscriptable, so the old ``keys[i]`` access raised TypeError.
        for key in expected_vals:
            exp_val = expected_vals[key]
            act_val = actual_vals[key]
            if exp_val == act_val:
                return_flag = return_flag and True
            else:
                return_flag = return_flag and False
                self.debug("expected Value: %s, is not matching with actual value: %s" % (
                    exp_val,
                    act_val
                ))
        return return_flag

    @attr(tags=["basic", "provisioning"])
    def test_01_list_securitygroups_pagination(self):
        """
        @Desc: Test to List Security Groups pagination
        @steps:
        Step1: Listing all the Security Groups for a user
        Step2: Verifying that list size is 1
        Step3: Creating (page size) number of Security Groups
        Step4: Listing all the Security Groups again for a user
        Step5: Verifying that list size is (page size + 1)
        Step6: Listing all the Security Groups in page1
        Step7: Verifying that list size is (page size)
        Step8: Listing all the Security Groups in page2
        Step9: Verifying that list size is 1
        Step10: Deleting the Security Group present in page 2
        Step11: Listing all the Security Groups in page2
        Step12: Verifying that no security groups are listed
        """
        # Listing all the Security Groups for a User
        list_securitygroups_before = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"]
        )
        # Verifying that default security group is created
        status = validateList(list_securitygroups_before)
        self.assertEquals(
            PASS,
            status[0],
            "Default Security Groups creation failed"
        )
        # Verifying the size of the list is 1
        self.assertEquals(
            1,
            len(list_securitygroups_before),
            "Count of Security Groups list is not matching"
        )
        # Creating pagesize number of security groups
        for i in range(0, (self.services["pagesize"])):
            securitygroup_created = SecurityGroup.create(
                self.userapiclient,
                self.services["security_group"],
                account=self.account.name,
                domainid=self.domain.id,
                description=self.services["security_group"]["name"]
            )
            self.assertIsNotNone(
                securitygroup_created,
                "Security Group creation failed"
            )
            # Register every created group for cleanup.  (The original
            # guard ``i < pagesize`` was always true inside this loop.)
            self.cleanup.append(securitygroup_created)
        # Listing all the security groups for user again
        list_securitygroups_after = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"]
        )
        status = validateList(list_securitygroups_after)
        self.assertEquals(
            PASS,
            status[0],
            "Security Groups creation failed"
        )
        # Verifying that list size is pagesize + 1
        self.assertEquals(
            self.services["pagesize"] + 1,
            len(list_securitygroups_after),
            "Failed to create pagesize + 1 number of Security Groups"
        )
        # Listing all the security groups in page 1
        list_securitygroups_page1 = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"],
            page=1,
            pagesize=self.services["pagesize"]
        )
        status = validateList(list_securitygroups_page1)
        self.assertEquals(
            PASS,
            status[0],
            "Failed to list security groups in page 1"
        )
        # Verifying the list size to be equal to pagesize
        self.assertEquals(
            self.services["pagesize"],
            len(list_securitygroups_page1),
            "Size of security groups in page 1 is not matching"
        )
        # Listing all the security groups in page 2
        list_securitygroups_page2 = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"],
            page=2,
            pagesize=self.services["pagesize"]
        )
        status = validateList(list_securitygroups_page2)
        self.assertEquals(
            PASS,
            status[0],
            "Failed to list security groups in page 2"
        )
        # Verifying the list size to be equal to pagesize
        self.assertEquals(
            1,
            len(list_securitygroups_page2),
            "Size of security groups in page 2 is not matching"
        )
        # Deleting the security group present in page 2
        SecurityGroup.delete(
            securitygroup_created,
            self.userapiclient)
        # It is deleted now, so it must not be cleaned up again in tearDown.
        self.cleanup.remove(securitygroup_created)
        # Listing all the security groups in page 2 again
        list_securitygroups_page2 = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"],
            page=2,
            pagesize=self.services["pagesize"]
        )
        # Verifying that there are no security groups listed
        self.assertIsNone(
            list_securitygroups_page2,
            "Security Groups not deleted from page 2"
        )
        return

    @attr(tags=["basic", "provisioning"])
    def test_02_securitygroups_authorize_revoke_ingress(self):
        """
        @Desc: Test to Authorize and Revoke Ingress for Security Group
        @steps:
        Step1: Listing all the Security Groups for a user
        Step2: Verifying that list size is 1
        Step3: Creating a Security Groups
        Step4: Listing all the Security Groups again for a user
        Step5: Verifying that list size is 2
        Step6: Authorizing Ingress for the security group created in step3
        Step7: Listing the security groups by passing id of security group created in step3
        Step8: Verifying that list size is 1
        Step9: Verifying that Ingress is authorized to the security group
        Step10: Verifying the details of the Ingress rule are as expected
        Step11: Revoking Ingress for the security group created in step3
        Step12: Listing the security groups by passing id of security group created in step3
        Step13: Verifying that list size is 1
        Step14: Verifying that Ingress is revoked from the security group
        """
        # Listing all the Security Groups for a User
        list_securitygroups_before = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"]
        )
        # Verifying that default security group is created
        status = validateList(list_securitygroups_before)
        self.assertEquals(
            PASS,
            status[0],
            "Default Security Groups creation failed"
        )
        # Verifying the size of the list is 1
        self.assertEquals(
            1,
            len(list_securitygroups_before),
            "Count of Security Groups list is not matching"
        )
        # Creating a security group
        securitygroup_created = SecurityGroup.create(
            self.userapiclient,
            self.services["security_group"],
            account=self.account.name,
            domainid=self.domain.id,
            description=self.services["security_group"]["name"]
        )
        self.assertIsNotNone(
            securitygroup_created,
            "Security Group creation failed"
        )
        self.cleanup.append(securitygroup_created)
        # Listing all the security groups for user again
        list_securitygroups_after = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"]
        )
        status = validateList(list_securitygroups_after)
        self.assertEquals(
            PASS,
            status[0],
            "Security Groups creation failed"
        )
        # Verifying that list size is 2
        self.assertEquals(
            2,
            len(list_securitygroups_after),
            "Failed to create Security Group"
        )
        # Authorizing Ingress for the security group created in step3
        securitygroup_created.authorize(
            self.userapiclient,
            self.services["ingress_rule"],
            self.account.name,
            self.domain.id,
        )
        # Listing the security group by Id
        list_securitygroups_byid = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"],
            id=securitygroup_created.id,
            domainid=self.domain.id
        )
        # Verifying that security group is listed
        status = validateList(list_securitygroups_byid)
        self.assertEquals(
            PASS,
            status[0],
            "Listing of Security Groups by id failed"
        )
        # Verifying size of the list is 1
        self.assertEquals(
            1,
            len(list_securitygroups_byid),
            "Count of the listing security group by id is not matching"
        )
        securitygroup_ingress = list_securitygroups_byid[0].ingressrule
        # Validating the Ingress rule
        status = validateList(securitygroup_ingress)
        self.assertEquals(
            PASS,
            status[0],
            "Security Groups Ingress rule authorization failed"
        )
        self.assertEquals(
            1,
            len(securitygroup_ingress),
            "Security Group Ingress rules count is not matching"
        )
        # Verifying the details of the Ingress rule are as expected
        # Creating expected and actual values dictionaries
        expected_dict = {
            "cidr": self.services["ingress_rule"]["cidrlist"],
            "protocol": self.services["ingress_rule"]["protocol"],
            "startport": self.services["ingress_rule"]["startport"],
            "endport": self.services["ingress_rule"]["endport"],
        }
        actual_dict = {
            "cidr": str(securitygroup_ingress[0].cidr),
            "protocol": str(securitygroup_ingress[0].protocol.upper()),
            "startport": str(securitygroup_ingress[0].startport),
            "endport": str(securitygroup_ingress[0].endport),
        }
        ingress_status = self.__verify_values(
            expected_dict,
            actual_dict
        )
        self.assertEqual(
            True,
            ingress_status,
            "Listed Security group Ingress rule details are not as expected"
        )
        # Revoking the Ingress rule from Security Group
        securitygroup_created.revoke(self.userapiclient, securitygroup_ingress[0].ruleid)
        # Listing the security group by Id
        list_securitygroups_byid = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"],
            id=securitygroup_created.id,
            domainid=self.domain.id
        )
        # Verifying that security group is listed
        status = validateList(list_securitygroups_byid)
        self.assertEquals(
            PASS,
            status[0],
            "Listing of Security Groups by id failed"
        )
        # Verifying size of the list is 1
        self.assertEquals(
            1,
            len(list_securitygroups_byid),
            "Count of the listing security group by id is not matching"
        )
        securitygroup_ingress = list_securitygroups_byid[0].ingressrule
        # Verifying that Ingress rule is empty(revoked);
        # validateList reports the failure reason at index 2.
        status = validateList(securitygroup_ingress)
        self.assertEquals(
            EMPTY_LIST,
            status[2],
            "Security Groups Ingress rule is not revoked"
        )
        return

    @attr(tags=["basic", "provisioning"])
    def test_03_securitygroups_authorize_revoke_egress(self):
        """
        @Desc: Test to Authorize and Revoke Egress for Security Group
        @steps:
        Step1: Listing all the Security Groups for a user
        Step2: Verifying that list size is 1
        Step3: Creating a Security Groups
        Step4: Listing all the Security Groups again for a user
        Step5: Verifying that list size is 2
        Step6: Authorizing Egress for the security group created in step3
        Step7: Listing the security groups by passing id of security group created in step3
        Step8: Verifying that list size is 1
        Step9: Verifying that Egress is authorized to the security group
        Step10: Verifying the details of the Egress rule are as expected
        Step11: Revoking Egress for the security group created in step3
        Step12: Listing the security groups by passing id of security group created in step3
        Step13: Verifying that list size is 1
        Step14: Verifying that Egress is revoked from the security group
        """
        # Listing all the Security Groups for a User
        list_securitygroups_before = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"]
        )
        # Verifying that default security group is created
        status = validateList(list_securitygroups_before)
        self.assertEquals(
            PASS,
            status[0],
            "Default Security Groups creation failed"
        )
        # Verifying the size of the list is 1
        self.assertEquals(
            1,
            len(list_securitygroups_before),
            "Count of Security Groups list is not matching"
        )
        # Creating a security group
        securitygroup_created = SecurityGroup.create(
            self.userapiclient,
            self.services["security_group"],
            account=self.account.name,
            domainid=self.domain.id,
            description=self.services["security_group"]["name"]
        )
        self.assertIsNotNone(
            securitygroup_created,
            "Security Group creation failed"
        )
        self.cleanup.append(securitygroup_created)
        # Listing all the security groups for user again
        list_securitygroups_after = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"]
        )
        status = validateList(list_securitygroups_after)
        self.assertEquals(
            PASS,
            status[0],
            "Security Groups creation failed"
        )
        # Verifying that list size is 2
        self.assertEquals(
            2,
            len(list_securitygroups_after),
            "Failed to create Security Group"
        )
        # Authorizing Egress for the security group created in step3
        # (test data for the rule is shared with the ingress test)
        securitygroup_created.authorizeEgress(
            self.userapiclient,
            self.services["ingress_rule"],
            self.account.name,
            self.domain.id,
        )
        # Listing the security group by Id
        list_securitygroups_byid = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"],
            id=securitygroup_created.id,
            domainid=self.domain.id
        )
        # Verifying that security group is listed
        status = validateList(list_securitygroups_byid)
        self.assertEquals(
            PASS,
            status[0],
            "Listing of Security Groups by id failed"
        )
        # Verifying size of the list is 1
        self.assertEquals(
            1,
            len(list_securitygroups_byid),
            "Count of the listing security group by id is not matching"
        )
        securitygroup_egress = list_securitygroups_byid[0].egressrule
        # Validating the Egress rule
        status = validateList(securitygroup_egress)
        self.assertEquals(
            PASS,
            status[0],
            "Security Groups Egress rule authorization failed"
        )
        self.assertEquals(
            1,
            len(securitygroup_egress),
            "Security Group Egress rules count is not matching"
        )
        # Verifying the details of the Egress rule are as expected
        # Creating expected and actual values dictionaries
        expected_dict = {
            "cidr": self.services["ingress_rule"]["cidrlist"],
            "protocol": self.services["ingress_rule"]["protocol"],
            "startport": self.services["ingress_rule"]["startport"],
            "endport": self.services["ingress_rule"]["endport"],
        }
        actual_dict = {
            "cidr": str(securitygroup_egress[0].cidr),
            "protocol": str(securitygroup_egress[0].protocol.upper()),
            "startport": str(securitygroup_egress[0].startport),
            "endport": str(securitygroup_egress[0].endport),
        }
        ingress_status = self.__verify_values(
            expected_dict,
            actual_dict
        )
        self.assertEqual(
            True,
            ingress_status,
            "Listed Security group Egress rule details are not as expected"
        )
        # Revoking the Egress rule from Security Group
        securitygroup_created.revokeEgress(self.userapiclient, securitygroup_egress[0].ruleid)
        # Listing the security group by Id
        list_securitygroups_byid = SecurityGroup.list(
            self.userapiclient,
            listall=self.services["listall"],
            id=securitygroup_created.id,
            domainid=self.domain.id
        )
        # Verifying that security group is listed
        status = validateList(list_securitygroups_byid)
        self.assertEquals(
            PASS,
            status[0],
            "Listing of Security Groups by id failed"
        )
        # Verifying size of the list is 1
        self.assertEquals(
            1,
            len(list_securitygroups_byid),
            "Count of the listing security group by id is not matching"
        )
        securitygroup_egress = list_securitygroups_byid[0].egressrule
        # Verifying that Egress rule is empty(revoked);
        # validateList reports the failure reason at index 2.
        status = validateList(securitygroup_egress)
        self.assertEquals(
            EMPTY_LIST,
            status[2],
            "Security Groups Egress rule is not revoked"
        )
        return
| [
"marvin.lib.utils.validateList",
"marvin.lib.base.Account.create",
"marvin.lib.base.SecurityGroup.create",
"marvin.lib.base.SecurityGroup.delete",
"marvin.lib.common.get_domain",
"marvin.lib.base.SecurityGroup.list",
"marvin.lib.common.get_template",
"marvin.lib.utils.cleanup_resources"
] | [((4659, 4695), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['basic', 'provisioning']"}), "(tags=['basic', 'provisioning'])\n", (4663, 4695), False, 'from nose.plugins.attrib import attr\n'), ((11305, 11341), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['basic', 'provisioning']"}), "(tags=['basic', 'provisioning'])\n", (11309, 11341), False, 'from nose.plugins.attrib import attr\n'), ((19815, 19851), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['basic', 'provisioning']"}), "(tags=['basic', 'provisioning'])\n", (19819, 19851), False, 'from nose.plugins.attrib import attr\n'), ((2873, 2920), 'marvin.lib.utils.cleanup_resources', 'cleanup_resources', (['self.apiClient', 'self.cleanup'], {}), '(self.apiClient, self.cleanup)\n', (2890, 2920), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((5615, 5687), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']"}), "(self.userapiclient, listall=self.services['listall'])\n", (5633, 5687), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((5934, 5974), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_before'], {}), '(list_securitygroups_before)\n', (5946, 5974), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((7491, 7563), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']"}), "(self.userapiclient, listall=self.services['listall'])\n", (7509, 7563), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((7748, 7787), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_after'], {}), '(list_securitygroups_after)\n', (7760, 7787), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((8365, 8485), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']", 
'page': '(1)', 'pagesize': "self.services['pagesize']"}), "(self.userapiclient, listall=self.services['listall'],\n page=1, pagesize=self.services['pagesize'])\n", (8383, 8485), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((8776, 8815), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_page1'], {}), '(list_securitygroups_page1)\n', (8788, 8815), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((9399, 9519), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']", 'page': '(2)', 'pagesize': "self.services['pagesize']"}), "(self.userapiclient, listall=self.services['listall'],\n page=2, pagesize=self.services['pagesize'])\n", (9417, 9519), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((9810, 9849), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_page2'], {}), '(list_securitygroups_page2)\n', (9822, 9849), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((10385, 10448), 'marvin.lib.base.SecurityGroup.delete', 'SecurityGroup.delete', (['securitygroup_created', 'self.userapiclient'], {}), '(securitygroup_created, self.userapiclient)\n', (10405, 10448), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((10653, 10773), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']", 'page': '(2)', 'pagesize': "self.services['pagesize']"}), "(self.userapiclient, listall=self.services['listall'],\n page=2, pagesize=self.services['pagesize'])\n", (10671, 10773), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((12506, 12578), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']"}), "(self.userapiclient, listall=self.services['listall'])\n", (12524, 12578), False, 'from marvin.lib.base import SecurityGroup, 
Account\n'), ((12825, 12865), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_before'], {}), '(list_securitygroups_before)\n', (12837, 12865), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((13389, 13576), 'marvin.lib.base.SecurityGroup.create', 'SecurityGroup.create', (['self.userapiclient', "self.services['security_group']"], {'account': 'self.account.name', 'domainid': 'self.domain.id', 'description': "self.services['security_group']['name']"}), "(self.userapiclient, self.services['security_group'],\n account=self.account.name, domainid=self.domain.id, description=self.\n services['security_group']['name'])\n", (13409, 13576), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((14208, 14280), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']"}), "(self.userapiclient, listall=self.services['listall'])\n", (14226, 14280), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((14465, 14504), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_after'], {}), '(list_securitygroups_after)\n', (14477, 14504), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((15408, 15539), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']", 'id': 'securitygroup_created.id', 'domainid': 'self.domain.id'}), "(self.userapiclient, listall=self.services['listall'], id\n =securitygroup_created.id, domainid=self.domain.id)\n", (15426, 15539), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((15874, 15912), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_byid'], {}), '(list_securitygroups_byid)\n', (15886, 15912), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((16501, 16536), 'marvin.lib.utils.validateList', 'validateList', (['securitygroup_ingress'], {}), 
'(securitygroup_ingress)\n', (16513, 16536), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((18445, 18576), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']", 'id': 'securitygroup_created.id', 'domainid': 'self.domain.id'}), "(self.userapiclient, listall=self.services['listall'], id\n =securitygroup_created.id, domainid=self.domain.id)\n", (18463, 18576), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((18911, 18949), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_byid'], {}), '(list_securitygroups_byid)\n', (18923, 18949), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((19556, 19591), 'marvin.lib.utils.validateList', 'validateList', (['securitygroup_ingress'], {}), '(securitygroup_ingress)\n', (19568, 19591), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((21009, 21081), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']"}), "(self.userapiclient, listall=self.services['listall'])\n", (21027, 21081), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((21328, 21368), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_before'], {}), '(list_securitygroups_before)\n', (21340, 21368), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((21892, 22079), 'marvin.lib.base.SecurityGroup.create', 'SecurityGroup.create', (['self.userapiclient', "self.services['security_group']"], {'account': 'self.account.name', 'domainid': 'self.domain.id', 'description': "self.services['security_group']['name']"}), "(self.userapiclient, self.services['security_group'],\n account=self.account.name, domainid=self.domain.id, description=self.\n services['security_group']['name'])\n", (21912, 22079), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((22711, 
22783), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']"}), "(self.userapiclient, listall=self.services['listall'])\n", (22729, 22783), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((22968, 23007), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_after'], {}), '(list_securitygroups_after)\n', (22980, 23007), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((23946, 24077), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']", 'id': 'securitygroup_created.id', 'domainid': 'self.domain.id'}), "(self.userapiclient, listall=self.services['listall'], id\n =securitygroup_created.id, domainid=self.domain.id)\n", (23964, 24077), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((24412, 24450), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_byid'], {}), '(list_securitygroups_byid)\n', (24424, 24450), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((25037, 25071), 'marvin.lib.utils.validateList', 'validateList', (['securitygroup_egress'], {}), '(securitygroup_egress)\n', (25049, 25071), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((26975, 27106), 'marvin.lib.base.SecurityGroup.list', 'SecurityGroup.list', (['self.userapiclient'], {'listall': "self.services['listall']", 'id': 'securitygroup_created.id', 'domainid': 'self.domain.id'}), "(self.userapiclient, listall=self.services['listall'], id\n =securitygroup_created.id, domainid=self.domain.id)\n", (26993, 27106), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((27441, 27479), 'marvin.lib.utils.validateList', 'validateList', (['list_securitygroups_byid'], {}), '(list_securitygroups_byid)\n', (27453, 27479), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((28084, 28118), 
'marvin.lib.utils.validateList', 'validateList', (['securitygroup_egress'], {}), '(securitygroup_egress)\n', (28096, 28118), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((1655, 1681), 'marvin.lib.common.get_domain', 'get_domain', (['cls.api_client'], {}), '(cls.api_client)\n', (1665, 1681), False, 'from marvin.lib.common import get_zone, get_domain, get_template\n'), ((1791, 1856), 'marvin.lib.common.get_template', 'get_template', (['cls.api_client', 'cls.zone.id', "cls.services['ostype']"], {}), "(cls.api_client, cls.zone.id, cls.services['ostype'])\n", (1803, 1856), False, 'from marvin.lib.common import get_zone, get_domain, get_template\n'), ((2069, 2148), 'marvin.lib.base.Account.create', 'Account.create', (['cls.api_client', "cls.services['account']"], {'domainid': 'cls.domain.id'}), "(cls.api_client, cls.services['account'], domainid=cls.domain.id)\n", (2083, 2148), False, 'from marvin.lib.base import SecurityGroup, Account\n'), ((3007, 3054), 'marvin.lib.utils.cleanup_resources', 'cleanup_resources', (['cls.api_client', 'cls._cleanup'], {}), '(cls.api_client, cls._cleanup)\n', (3024, 3054), False, 'from marvin.lib.utils import validateList, cleanup_resources\n'), ((6576, 6763), 'marvin.lib.base.SecurityGroup.create', 'SecurityGroup.create', (['self.userapiclient', "self.services['security_group']"], {'account': 'self.account.name', 'domainid': 'self.domain.id', 'description': "self.services['security_group']['name']"}), "(self.userapiclient, self.services['security_group'],\n account=self.account.name, domainid=self.domain.id, description=self.\n services['security_group']['name'])\n", (6596, 6763), False, 'from marvin.lib.base import SecurityGroup, Account\n')] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Component tests for VSP Managed Subnets functionality
with Nuage VSP SDN plugin
"""
# Import Local Modules
from nuageTestCase import nuageTestCase, needscleanup
from marvin.lib.base import (Account,
Domain,
VirtualMachine)
from marvin.cloudstackAPI import updateZone
# Import System Modules
from nose.plugins.attrib import attr
import time
class TestNuageManagedSubnets(nuageTestCase):
"""Test Managed Subnets functionality with Nuage VSP SDN plugin
"""
    @classmethod
    def setUpClass(cls):
        """One-time class setup.

        Creates every VPC, isolated and shared network offering the tests in
        this class consume, and registers them in ``cls._cleanup`` so the
        framework tears them down after the whole class has run.
        """
        super(TestNuageManagedSubnets, cls).setUpClass()
        # create a nuage vpc offering
        cls.nuage_vpc_offering = \
            cls.create_VpcOffering(cls.test_data["nuagevsp"]["vpc_offering"])
        # tier network offerings
        cls.nuage_vpc_network_offering = \
            cls.create_NetworkOffering(cls.test_data["nuagevsp"]
                                       ["vpc_network_offering"])
        # create a Nuage isolated network offering with vr
        # NOTE(review): the trailing True/False flag is forwarded to
        # create_NetworkOffering; its meaning is defined in nuageTestCase —
        # confirm against that helper before relying on it.
        cls.nuage_isolated_network_offering = cls.create_NetworkOffering(
            cls.test_data["nuagevsp"]["isolated_network_offering"], True)
        # create a Nuage isolated network offering with vr and persistent
        cls.nuage_isolated_network_offering_persistent = \
            cls.create_NetworkOffering(
                cls.test_data["nuagevsp"]
                ["isolated_network_offering_persistent"],
                True)
        # create a Nuage isolated network offering without vr
        cls.nuage_isolated_network_offering_without_vr = \
            cls.create_NetworkOffering(
                cls.test_data["nuagevsp"]
                ["isolated_network_offering_without_vr"],
                True)
        # create a Nuage isolated network offering without vr but persistent
        cls.nuage_isolated_network_offering_without_vr_persistent = \
            cls.create_NetworkOffering(
                cls.test_data["nuagevsp"]
                ["isolated_network_offering_without_vr_persistent"],
                True)
        # create a Nuage shared network offering
        cls.nuage_shared_network_offering = cls.create_NetworkOffering(
            cls.test_data["nuagevsp"]["shared_nuage_network_offering"],
            False)
        # Everything listed here is destroyed once per class, not per test.
        cls._cleanup = [
            cls.nuage_isolated_network_offering,
            cls.nuage_isolated_network_offering_persistent,
            cls.nuage_isolated_network_offering_without_vr,
            cls.nuage_isolated_network_offering_without_vr_persistent,
            cls.nuage_vpc_offering,
            cls.nuage_vpc_network_offering,
            cls.nuage_shared_network_offering
        ]
        return
def setUp(self):
# Create an account
self.account = Account.create(self.api_client,
self.test_data["account"],
admin=True,
domainid=self.domain.id
)
self.cleanup = [self.account]
return
def verify_ping_to_vm(self, src_vm, dst_vm, public_ip, dst_hostname=None):
if self.isSimulator:
self.debug("Simulator Environment: not verifying pinging")
return
try:
src_vm.ssh_ip = public_ip.ipaddress.ipaddress
src_vm.ssh_port = self.test_data["virtual_machine"]["ssh_port"]
src_vm.username = self.test_data["virtual_machine"]["username"]
src_vm.password = self.test_data["virtual_machine"]["password"]
self.debug("SSHing into VM: %s with %s" %
(src_vm.ssh_ip, src_vm.password))
ssh = self.ssh_into_VM(src_vm, public_ip)
except Exception as e:
self.fail("SSH into VM failed with exception %s" % e)
self.verify_pingtovmipaddress(ssh, dst_vm.ipaddress)
if dst_hostname:
self.verify_pingtovmipaddress(ssh, dst_hostname)
def verify_pingtovmipaddress(self, ssh, pingtovmipaddress):
"""verify ping to ipaddress of the vm and retry 3 times"""
successfull_ping = False
nbr_retries = 0
max_retries = 5
cmd = 'ping -c 2 ' + pingtovmipaddress
while not successfull_ping and nbr_retries < max_retries:
self.debug("ping vm by ipaddress with command: " + cmd)
outputlist = ssh.execute(cmd)
self.debug("command is executed properly " + cmd)
completeoutput = str(outputlist).strip('[]')
self.debug("complete output is " + completeoutput)
if '2 received' in completeoutput:
self.debug("PASS as vm is pingeable: " + completeoutput)
successfull_ping = True
else:
self.debug("FAIL as vm is not pingeable: " + completeoutput)
time.sleep(3)
nbr_retries = nbr_retries + 1
if not successfull_ping:
self.fail("FAILED TEST as excepted value not found in vm")
# verify_vsd_vm - Verifies the given CloudStack VM deployment and status in
# VSD
def verify_vsdmngd_vm(self, vm, vsdmngd_subnet, stopped=False):
self.debug("Verifying the deployment and state of VSD Managed VM "
"- %s in VSD" % vm.name)
vsd_vm = self.vsd.get_vm(filter=self.get_externalID_filter(vm.id))
self.assertNotEqual(vsd_vm, None,
"VM data format in VSD should not be of type None"
)
vm_info = VirtualMachine.list(self.api_client, id=vm.id)[0]
for nic in vm_info.nic:
vsd_subnet = vsdmngd_subnet
vsd_vport = self.vsd.get_vport(
subnet=vsd_subnet, filter=self.get_externalID_filter(nic.id))
vsd_vm_interface = self.vsd.get_vm_interface(
filter=self.get_externalID_filter(nic.id))
self.assertEqual(vsd_vport.active, True,
"VSD VM vport should be active"
)
self.assertEqual(vsd_vm_interface.ip_address, nic.ipaddress,
"VSD VM interface IP address should match VM's "
"NIC IP address in CloudStack"
)
if not self.isSimulator:
self.verify_vsd_object_status(vm, stopped)
self.debug("Successfully verified the deployment and state of VM - %s "
"in VSD" % vm.name)
    @attr(tags=["advanced", "nuagevsp", "isonw"], required_hardware="false")
    def test_01_nuage_mngd_subnets_isonw(self):
        """Test Nuage VSP Managed Subnets for isolated networks
        """
        # 1. Create multiple L3DomainTemplate with Zone and Subnet on VSP
        #    Create Ingress & Egress ACL Top & Bottom Templates
        #    Add ACL rules to allow intra-subnet traffic
        #    Instiantiate these L3Domains and store its Subnet VSD ID
        # 2. Create a persistent and non persistent isolated network offering
        #    create offerings with and without VirtualRouter
        # 3. Create isolated networks specifying above offerings and
        #    specifying the stored Subnet ID's of VSP
        # 4. Verify ACL rules and connectivity via deploying VM's ,
        #    Enabling staticNAT, applying firewall and egress rules
        # 5. Verify negative tests like uniqueness of vsd subnet
        # Create all items on vsd required for this test
        enterprise = self.fetch_by_externalID(self._session.user.enterprises,
                                              self.domain)
        domain_template = self.create_vsd_domain_template(enterprise)
        self.create_vsd_default_acls(domain_template)
        domain1 = self.create_vsd_domain(domain_template, enterprise,
                                         "L3DomainToBeConsumedByACS")
        zone1 = self.create_vsd_zone(domain1, "ZoneToBeConsumedByACS")
        subnet1 = self.create_vsd_subnet(zone1, "SubnetToBeConsumedByACS",
                                         "10.0.0.1/24")
        domain2 = self.create_vsd_domain(domain_template, enterprise,
                                         "2ndL3DomainToBeConsumedByACS")
        zone2 = self.create_vsd_zone(domain2, "2ndZoneToBeConsumedByACS")
        subnet2 = self.create_vsd_subnet(zone2, "2ndSubnetToBeConsumedByACS",
                                         "10.1.0.1/24")
        # DHCP option 15 sets the domain-search suffix for subnet2.
        self.create_vsd_dhcp_option(subnet2, 15, ["nuagenetworks2.net"])
        domain3 = self.create_vsd_domain(domain_template, enterprise,
                                         "3rdL3DomainToBeConsumedByACS")
        zone3 = self.create_vsd_zone(domain3, "3rdZoneToBeConsumedByACS")
        subnet3 = self.create_vsd_subnet(zone3, "3rdSubnetToBeConsumedByACS",
                                         "10.2.0.1/24")
        # Run the full scenario twice to prove the VSD subnets can be
        # re-consumed after the networks are deleted at the end of each pass.
        for i in range(1, 3):
            # On ACS create network using non-persistent nw offering allow
            isolated_network = self.create_Network(
                self.nuage_isolated_network_offering,
                gateway="10.0.0.1", netmask="255.255.255.0",
                externalid=subnet1.id, cleanup=False)
            # On ACS create network using persistent nw offering allow
            # NOTE(review): gateway 10.5.0.1 does not match subnet2's
            # 10.1.0.1/24 on VSD — confirm this mismatch is intentional.
            isolated_network2 = self.create_Network(
                self.nuage_isolated_network_offering_persistent,
                gateway="10.5.0.1", netmask="255.255.255.0",
                externalid=subnet2.id, cleanup=False)
            # Shared network offering may not consume a VSD managed subnet.
            with self.assertRaises(Exception):
                self.create_Network(
                    self.nuage_shared_network_offering, gateway="10.2.0.1",
                    netmask="255.255.255.0", vlan=1201, externalid=subnet3.id)
            # On ACS create network when VSDSubnet is already in use
            with self.assertRaises(Exception):
                self.create_Network(
                    self.nuage_isolated_network_offering_persistent,
                    gateway="10.3.0.1", netmask="255.255.255.0",
                    externalid=subnet2.id)
            # On ACS create network when VSDSubnet is non-existing
            # (subnet2.id+1 is guaranteed not to be a valid VSD subnet id)
            with self.assertRaises(Exception):
                self.create_Network(
                    self.nuage_isolated_network_offering_persistent,
                    gateway="10.4.0.1", netmask="255.255.255.0",
                    externalid=subnet2.id+1)
            # verify floating ip and intra subnet connectivity
            vm_1 = self.create_VM(isolated_network, cleanup=False)
            self.test_data["virtual_machine"]["displayname"] = "vm2"
            self.test_data["virtual_machine"]["name"] = "vm2"
            vm_2 = self.create_VM(isolated_network, cleanup=False)
            self.test_data["virtual_machine"]["displayname"] = None
            self.test_data["virtual_machine"]["name"] = None
            # VSD verification
            self.verify_vsd_network_not_present(isolated_network)
            self.verify_vsdmngd_vm(vm_1, subnet1)
            self.verify_vsdmngd_vm(vm_2, subnet1)
            self.debug("Creating Static NAT rule for the deployed VM in the "
                       "non persistently created Isolated network...")
            public_ip = self.acquire_PublicIPAddress(isolated_network)
            self.validate_PublicIPAddress(public_ip, isolated_network)
            self.create_StaticNatRule_For_VM(vm_1, public_ip, isolated_network)
            self.validate_PublicIPAddress(
                public_ip, isolated_network, static_nat=True, vm=vm_1)
            self.create_FirewallRule(public_ip,
                                     self.test_data["ingress_rule"])
            # Ping vm_2 both by IP and by hostname from vm_1.
            self.verify_ping_to_vm(vm_1, vm_2, public_ip, "vm2")
            vm_3 = self.create_VM(isolated_network2, cleanup=False)
            self.test_data["virtual_machine"]["displayname"] = "vm4"
            self.test_data["virtual_machine"]["name"] = "vm4"
            vm_4 = self.create_VM(isolated_network2, cleanup=False)
            self.test_data["virtual_machine"]["displayname"] = None
            self.test_data["virtual_machine"]["name"] = None
            self.verify_vsd_network_not_present(isolated_network2)
            self.verify_vsdmngd_vm(vm_3, subnet2)
            self.verify_vsdmngd_vm(vm_4, subnet2)
            self.debug("Creating Static NAT rule for the deployed VM in the "
                       "persistently created Isolated network...")
            public_ip2 = self.acquire_PublicIPAddress(isolated_network2)
            self.validate_PublicIPAddress(public_ip2, isolated_network2)
            self.create_StaticNatRule_For_VM(vm_3, public_ip2,
                                             isolated_network2)
            self.validate_PublicIPAddress(
                public_ip2, isolated_network2, static_nat=True, vm=vm_3)
            self.create_FirewallRule(public_ip2,
                                     self.test_data["ingress_rule"])
            self.verify_ping_to_vm(vm_3, vm_4, public_ip2)
            # Tear everything down so the next loop pass starts clean.
            vm_4.delete(self.api_client, expunge=True)
            vm_3.delete(self.api_client, expunge=True)
            vm_2.delete(self.api_client, expunge=True)
            vm_1.delete(self.api_client, expunge=True)
            isolated_network2.delete(self.api_client)
            isolated_network.delete(self.api_client)
            self.debug("Number of loops %s" % i)
@attr(tags=["advanced", "nuagevsp", "vpc"], required_hardware="false")
def test_02_nuage_mngd_subnets_vpc(self):
"""Test Nuage VSP Managed Subnets for vpc and tier networks
"""
# 1. Create multiple L3DomainTemplate with Zone and Subnet on VSP
# Create Ingress & Egress ACL Top & Bottom Templates
# Add ACL rules to allow intra-subnet traffic
# Instiantiate these L3Domains and store its Subnet VSD ID
# 2. Create a vpc network offering and create a VPC
# create vpc tier network offerings with and without VirtualRouter
# 3. Create vpc tier networks specifying above offerings and
# specifying the stored Subnet ID's of VSP
# 4. Verify ACL rules and connectivity via deploying VM's ,
# Enabling staticNAT, applying firewall and egress rules
# 5. Verify negative tests like uniqueness of vsd subnet
# Create all items on vsd required for this test
enterprise = self.fetch_by_externalID(self._session.user.enterprises,
self.domain)
domain_template = self.create_vsd_domain_template(enterprise)
self.create_vsd_default_acls(domain_template)
domain1 = self.create_vsd_domain(domain_template, enterprise,
"L3DomainToBeConsumedByACS")
zone1 = self.create_vsd_zone(domain1, "ZoneToBeConsumedByACS")
subnet1 = self.create_vsd_subnet(zone1, "SubnetToBeConsumedByACS",
"10.1.0.1/24")
subnet2 = self.create_vsd_subnet(zone1, "2ndSubnetToBeConsumedByACS",
"10.1.128.1/24")
domain2 = self.create_vsd_domain(domain_template, enterprise,
"2ndL3DomainToBeConsumedByACS")
zone2 = self.create_vsd_zone(domain2, "2ndZoneToBeConsumedByACS")
subnet3 = self.create_vsd_subnet(zone2, "3rdSubnetToBeConsumedByACS",
"10.2.128.1/24")
cmd = updateZone.updateZoneCmd()
cmd.id = self.zone.id
cmd.domain = "vpc.com"
self.api_client.updateZone(cmd)
self.debug("Creating a VPC with Static NAT service provider as "
"VpcVirtualRouter")
vpc = self.create_Vpc(self.nuage_vpc_offering, cidr='10.1.0.0/16')
self.validate_Vpc(vpc, state="Enabled")
acl_list = self.create_NetworkAclList(
name="acl", description="acl", vpc=vpc)
self.create_NetworkAclRule(
self.test_data["ingress_rule"], acl_list=acl_list)
self.create_NetworkAclRule(
self.test_data["icmprule"], acl_list=acl_list)
self.debug("Creating another VPC with Static NAT service provider "
"as VpcVirtualRouter")
vpc2 = self.create_Vpc(self.nuage_vpc_offering, cidr='10.2.0.0/16')
self.validate_Vpc(vpc2, state="Enabled")
acl_list2 = self.create_NetworkAclList(
name="acl", description="acl", vpc=vpc2)
self.create_NetworkAclRule(
self.test_data["ingress_rule"], acl_list=acl_list2)
self.create_NetworkAclRule(
self.test_data["icmprule"], acl_list=acl_list2)
self.debug("Creating an unmanaged VPC tier network with Static NAT")
vpc2_tier_unmngd = self.create_Network(self.nuage_vpc_network_offering,
gateway='10.2.0.1',
vpc=vpc2,
acl_list=acl_list2)
self.validate_Network(vpc2_tier_unmngd, state="Implemented")
# VPC Tier Network creation should fail as VPC is unmanaged already
with self.assertRaises(Exception):
self.create_Network(self.nuage_vpc_network_offering,
gateway='10.2.128.1',
vpc=vpc2,
acl_list=acl_list2,
externalid=subnet3.id)
vpc2_tier_unmngd.delete(self.api_client)
vpc2.delete(self.api_client)
# VPC tier network creation fails when cidr does not match on VSD
with self.assertRaises(Exception):
self.create_Network(self.nuage_vpc_network_offering,
gateway='10.1.1.1',
vpc=vpc,
acl_list=acl_list,
externalid=subnet1.id)
for i in range(1, 3):
self.debug("Creating a mngd VPC tier with Static NAT service")
vpc_tier = self.create_Network(self.nuage_vpc_network_offering,
gateway='10.1.0.1',
vpc=vpc,
acl_list=acl_list,
externalid=subnet1.id,
cleanup=False)
self.validate_Network(vpc_tier, state="Implemented")
self.debug("Creating 2nd VPC tier network with Static NAT service")
# VPC 2nd tier creation fails when cidr doesn't match on VSD
with self.assertRaises(Exception):
self.create_Network(self.nuage_vpc_network_offering,
gateway='10.1.129.1',
vpc=vpc,
acl_list=acl_list,
externalid=subnet2.id)
vpc_2ndtier = self.create_Network(self.nuage_vpc_network_offering,
gateway='10.1.128.1',
vpc=vpc,
acl_list=acl_list,
externalid=subnet2.id,
cleanup=False)
self.validate_Network(vpc_2ndtier, state="Implemented")
vpc_vr = self.get_Router(vpc_tier)
self.check_Router_state(vpc_vr, state="Running")
# VSD verification
self.verify_vsd_network_not_present(vpc_tier, vpc)
self.verify_vsd_network_not_present(vpc_2ndtier, vpc)
# On ACS create VPCTier network when VSDSubnet is already in use
with self.assertRaises(Exception):
self.create_Network(self.nuage_vpc_network_offering,
gateway='10.1.128.1',
vpc=vpc,
acl_list=acl_list,
externalid=subnet2.id)
# On ACS create VPCTier network when VSDSubnet does not exist
with self.assertRaises(Exception):
self.create_Network(self.nuage_vpc_network_offering,
gateway='10.1.128.1',
vpc=vpc,
acl_list=acl_list,
externalid=subnet2.id+1)
# On ACS create VPCTier network without VSDSubnet should fail
with self.assertRaises(Exception):
self.create_Network(self.nuage_vpc_network_offering,
gateway='10.1.203.1',
vpc=vpc,
acl_list=acl_list)
self.debug("Creating another VPC with Static NAT service provider "
"as VpcVirtualRouter With same CIDR")
vpc3 = self.create_Vpc(self.nuage_vpc_offering, cidr='10.1.0.0/16')
self.validate_Vpc(vpc3, state="Enabled")
acl_list3 = self.create_NetworkAclList(
name="acl", description="acl", vpc=vpc3)
self.create_NetworkAclRule(
self.test_data["ingress_rule"], acl_list=acl_list3)
self.create_NetworkAclRule(
self.test_data["icmprule"], acl_list=acl_list3)
self.debug("Creating a mngd VPC tier with Static NAT service")
vpc3_tier_unmngd = \
self.create_Network(self.nuage_vpc_network_offering,
gateway='10.1.0.1',
vpc=vpc3,
acl_list=acl_list3)
self.validate_Network(vpc3_tier_unmngd, state="Implemented")
vpc3_tier_unmngd.delete(self.api_client)
vpc3.delete(self.api_client)
self.debug("Deploying a VM in the created VPC tier network")
self.test_data["virtual_machine"]["displayname"] = "vpcvm1"
self.test_data["virtual_machine"]["name"] = "vpcvm1"
vpc_vm_1 = self.create_VM(vpc_tier, cleanup=False)
self.check_VM_state(vpc_vm_1, state="Running")
self.debug("Deploying another VM in the created VPC tier network")
self.test_data["virtual_machine"]["displayname"] = "vpcvm2"
self.test_data["virtual_machine"]["name"] = "vpcvm2"
vpc_vm_2 = self.create_VM(vpc_tier, cleanup=False)
self.check_VM_state(vpc_vm_2, state="Running")
self.debug("Deploying a VM in the 2nd VPC tier network")
self.test_data["virtual_machine"]["displayname"] = "vpcvm12"
self.test_data["virtual_machine"]["name"] = "vpcvm12"
vpc_vm_12 = self.create_VM(vpc_2ndtier, cleanup=False)
self.check_VM_state(vpc_vm_2, state="Running")
self.test_data["virtual_machine"]["displayname"] = None
self.test_data["virtual_machine"]["name"] = None
# VSD verification
self.verify_vsdmngd_vm(vpc_vm_1, subnet1)
self.verify_vsdmngd_vm(vpc_vm_2, subnet1)
self.verify_vsdmngd_vm(vpc_vm_12, subnet2)
self.debug("Creating Static NAT rule for the deployed VM "
"in the created VPC network...")
public_ip_1 = self.acquire_PublicIPAddress(vpc_tier, vpc=vpc)
self.validate_PublicIPAddress(public_ip_1, vpc_tier)
self.create_StaticNatRule_For_VM(vpc_vm_1, public_ip_1, vpc_tier)
self.validate_PublicIPAddress(
public_ip_1, vpc_tier, static_nat=True, vm=vpc_vm_1)
self.verify_ping_to_vm(vpc_vm_1, vpc_vm_2, public_ip_1)
self.verify_ping_to_vm(vpc_vm_1, vpc_vm_12, public_ip_1)
vpc_vm_1.delete(self.api_client, expunge=True)
vpc_vm_2.delete(self.api_client, expunge=True)
vpc_vm_12.delete(self.api_client, expunge=True)
vpc_tier.delete(self.api_client)
vpc_2ndtier.delete(self.api_client)
self.debug("Number of loops %s" % i)
    @attr(tags=["advanced", "nuagevsp", "domains"], required_hardware="false")
    def test_03_nuage_mngd_subnets_domains(self):
        """Test Nuage VSP Managed Subnets for ACS domains

        Links a CloudStack domain to a pre-created VSD enterprise (by passing
        the enterprise id as the domain's domainid), then exercises network
        creation, negative domain-creation cases, VM deployment, static NAT
        and intra-subnet connectivity under an admin account of that domain.
        """
        vsd_enterprise = self.create_vsd_enterprise()
        vsd_domain_template = self.create_vsd_domain_template(vsd_enterprise)
        self.create_vsd_default_acls(vsd_domain_template)
        vsd_domain1 = self.create_vsd_domain(vsd_domain_template,
                                             vsd_enterprise,
                                             "L3DomainToBeConsumedByACS")
        vsd_zone1 = self.create_vsd_zone(vsd_domain1, "ZoneToBeConsumedByACS")
        vsd_subnet1 = self.create_vsd_subnet(vsd_zone1,
                                             "SubnetToBeConsumedByACS",
                                             "10.0.0.1/24")
        # domainid carries the VSD enterprise id — presumably the Nuage
        # plugin maps the ACS domain onto that enterprise; confirm.
        acs_domain_1 = Domain.create(
            self.api_client,
            {},
            name="DomainManagedbyVsd",
            domainid=vsd_enterprise.id
        )
        # Create an admin and an user account under domain D1
        acs_account_1 = Account.create(
            self.api_client,
            self.test_data["acl"]["accountD1"],
            admin=True,
            domainid=acs_domain_1.id
        )
        self.cleanup.append(acs_domain_1)
        self.cleanup.append(acs_account_1)
        # On ACS create network using non-persistent nw offering allow
        isolated_network = self.create_Network(
            self.nuage_isolated_network_offering,
            gateway="10.0.0.1", netmask="255.255.255.0",
            account=acs_account_1,
            externalid=vsd_subnet1.id)
        # Creation of a domain with inUse domain UUID is not allowed
        with self.assertRaises(Exception):
            Domain.create(
                self.api_client,
                {},
                name="AnotherDomainManagedbyVsd",
                domainid=vsd_enterprise.id
            )
        # Creation of a domain with unexisting domain UUID is not allowed
        with self.assertRaises(Exception):
            Domain.create(
                self.api_client,
                {},
                name="YetAnotherDomainManagedbyVsd",
                domainid=vsd_enterprise.id+1
            )
        vm_1 = self.create_VM(isolated_network, account=acs_account_1)
        vm_2 = self.create_VM(isolated_network, account=acs_account_1)
        # VSD verification
        self.verify_vsd_network_not_present(isolated_network)
        self.verify_vsdmngd_vm(vm_1, vsd_subnet1)
        self.verify_vsdmngd_vm(vm_2, vsd_subnet1)
        self.debug("Creating Static NAT rule for the deployed VM in the "
                   "non persistently created Isolated network...")
        public_ip = self.acquire_PublicIPAddress(isolated_network,
                                                 account=acs_account_1)
        self.validate_PublicIPAddress(public_ip, isolated_network)
        self.create_StaticNatRule_For_VM(vm_1, public_ip, isolated_network)
        self.validate_PublicIPAddress(
            public_ip, isolated_network, static_nat=True, vm=vm_1)
        self.create_FirewallRule(public_ip,
                                 self.test_data["ingress_rule"])
        # Real SSH/ping checks only run outside the simulator.
        if not self.isSimulator:
            vm_public_ip = public_ip.ipaddress.ipaddress
            try:
                vm_1.ssh_ip = vm_public_ip
                vm_1.ssh_port = self.test_data["virtual_machine"]["ssh_port"]
                vm_1.username = self.test_data["virtual_machine"]["username"]
                vm_1.password = self.test_data["virtual_machine"]["password"]
                self.debug("SSHing into VM: %s with %s" %
                           (vm_1.ssh_ip, vm_1.password))
                ssh = vm_1.get_ssh_client(ipaddress=vm_public_ip)
            except Exception as e:
                self.fail("SSH into VM failed with exception %s" % e)
            self.verify_pingtovmipaddress(ssh, vm_2.ipaddress)
@attr(tags=["advanced", "nuagevsp", "account"], required_hardware="false")
def test_04_nuage_mngd_subnets_noadminaccount(self):
"""Test Nuage VSP Managed Subnets for ACS domains without admin account
"""
vsd_enterprise = self.create_vsd_enterprise()
vsd_domain_template = self.create_vsd_domain_template(vsd_enterprise)
self.create_vsd_default_acls(vsd_domain_template)
vsd_domain1 = self.create_vsd_domain(vsd_domain_template,
vsd_enterprise,
"L3DomainToBeConsumedByACS")
vsd_zone1 = self.create_vsd_zone(vsd_domain1, "ZoneToBeConsumedByACS")
vsd_subnet1 = self.create_vsd_subnet(vsd_zone1,
"SubnetToBeConsumedByACS",
"10.0.0.1/24")
acs_domain_1 = Domain.create(
self.api_client,
{},
name="DomainManagedbyVsd",
domainid=vsd_enterprise.id
)
# Create an no admin and an user account under domain D1
acs_account_1 = Account.create(
self.api_client,
self.test_data["acl"]["accountD1"],
admin=False,
domainid=acs_domain_1.id
)
self.cleanup.append(acs_domain_1)
self.cleanup.append(acs_account_1)
# On ACS create network fails as non admin account
with self.assertRaises(Exception):
self.create_Network(
self.nuage_isolated_network_offering,
gateway="10.0.0.1", netmask="255.255.255.0",
account=acs_account_1,
externalid=vsd_subnet1.id)
@needscleanup
def create_vsd_enterprise(self):
enterprise = self.vsdk.NUEnterprise()
enterprise.name = "EnterpriseToBeConsumedByACS"
enterprise.description = "EnterpriseToBeConsumedByACS"
(enterprise, connection) = self._session.user.create_child(enterprise)
return enterprise
def create_vsd_ingress_acl_template(self, domain_template,
priority_type="TOP"):
name = "Ingress ACL " + str(priority_type).capitalize()
acl_template = self.vsdk.NUIngressACLTemplate()
acl_template.name = name
acl_template.description = name
acl_template.priority_type = priority_type
acl_template.active = True
(acl_template, connection) = \
domain_template.create_child(acl_template)
return acl_template
def create_vsd_egress_acl_template(self, domain_template,
priority_type='TOP'):
name = "Egress ACL " + str(priority_type).capitalize()
acl_template = self.vsdk.NUEgressACLTemplate()
acl_template.name = name
acl_template.description = name
acl_template.priority_type = priority_type
acl_template.active = True
(acl_template, connection) = \
domain_template.create_child(acl_template)
return acl_template
@needscleanup
def create_vsd_domain_template(self, enterprise):
domain_template = self.vsdk.NUDomainTemplate()
domain_template.name = "L3DomainTemplateToBeConsumedByACS"
domain_template.description = "L3DomainTemplateToBeConsumedByACS"
(domain_template, connection) = \
enterprise.create_child(domain_template)
return domain_template
def create_vsd_default_acls(self, domain_template):
ingress_vsd_acl_template1 = self.create_vsd_ingress_acl_template(
domain_template, "TOP")
ingress_vsd_acl_template2 = self.create_vsd_ingress_acl_template(
domain_template, "BOTTOM")
ingress_vsd_acl_entry1 = self.vsdk.NUIngressACLEntryTemplate()
ingress_vsd_acl_entry1.name = "Default Intra-Subnet Allow"
ingress_vsd_acl_entry1.description = "Default Intra-Subnet Allow"
ingress_vsd_acl_entry1.priority = '1'
ingress_vsd_acl_entry1.protocol = 'ANY'
ingress_vsd_acl_template1.create_child(ingress_vsd_acl_entry1)
ingress_vsd_acl_entry2 = self.vsdk.NUIngressACLEntryTemplate()
ingress_vsd_acl_entry2.name = "Default Allow TCP"
ingress_vsd_acl_entry2.description = "Default Allow TCP"
ingress_vsd_acl_entry2.priority = '1'
ingress_vsd_acl_entry2.protocol = '6'
ingress_vsd_acl_entry2.source_port = '*'
ingress_vsd_acl_entry2.destination_port = '*'
ingress_vsd_acl_template2.create_child(ingress_vsd_acl_entry2)
ingress_vsd_acl_entry3 = self.vsdk.NUIngressACLEntryTemplate()
ingress_vsd_acl_entry3.name = "Default Allow UDP"
ingress_vsd_acl_entry3.description = "Default Allow UDP"
ingress_vsd_acl_entry3.priority = '2'
ingress_vsd_acl_entry3.protocol = '17'
ingress_vsd_acl_entry3.source_port = '*'
ingress_vsd_acl_entry3.destination_port = '*'
ingress_vsd_acl_template2.create_child(ingress_vsd_acl_entry3)
ingress_vsd_acl_entry4 = self.vsdk.NUIngressACLEntryTemplate()
ingress_vsd_acl_entry4.name = "Default Allow ICMP"
ingress_vsd_acl_entry4.description = "Default Allow ICMP"
ingress_vsd_acl_entry4.priority = '3'
ingress_vsd_acl_entry4.protocol = '1'
ingress_vsd_acl_template2.create_child(ingress_vsd_acl_entry4)
egress_vsd_acl_template1 = self.create_vsd_egress_acl_template(
domain_template, 'TOP')
egress_vsd_acl_template2 = self.create_vsd_egress_acl_template(
domain_template, 'BOTTOM')
egress_vsd_acl_entry1 = self.vsdk.NUEgressACLEntryTemplate()
egress_vsd_acl_entry1.name = "Default Intra-Subnet Allow"
egress_vsd_acl_entry1.description = "Default Intra-Subnet Allow"
egress_vsd_acl_entry1.priority = '1'
egress_vsd_acl_entry1.protocol = 'ANY'
egress_vsd_acl_template1.create_child(egress_vsd_acl_entry1)
egress_vsd_acl_entry2 = self.vsdk.NUEgressACLEntryTemplate()
egress_vsd_acl_entry2.name = "Default Allow ICMP"
egress_vsd_acl_entry2.description = "Default Allow ICMP"
egress_vsd_acl_entry2.priority = '3'
egress_vsd_acl_entry2.protocol = '1'
egress_vsd_acl_template2.create_child(egress_vsd_acl_entry2)
def create_vsd_domain(self, domain_template, enterprise, name):
domain = self.vsdk.NUDomain()
domain.name = name
domain.description = name
(domain, connection) = \
enterprise.instantiate_child(domain, domain_template)
return domain
def create_vsd_zone(self, domain, name):
zone = self.vsdk.NUZone()
zone.name = name
zone.description = name
(zone, connection) = domain.create_child(zone)
return zone
def create_vsd_subnet(self, zone, name, cidr):
subnet = self.vsdk.NUSubnet()
subnet.name = name
subnet.description = name
(subnet.gateway, subnet.netmask, subnet.address) = \
self._cidr_to_netmask(cidr)
(subnet, connection) = zone.create_child(subnet)
return subnet
def create_vsd_dhcp_option(self, subnet, type, value):
dhcp_option = self.vsdk.NUDHCPOption()
dhcp_option.actual_type = type
dhcp_option.actual_values = value
(dhcp_option, connection) = subnet.create_child(dhcp_option)
return dhcp_option
def _cidr_to_netmask(self, cidr):
import socket
import struct
network, net_bits = cidr.split('/')
host_bits = 32 - int(net_bits)
netmask_bits = (1 << 32) - (1 << host_bits)
netmask = socket.inet_ntoa(struct.pack('!I', netmask_bits))
network_bits = struct.unpack('!I', socket.inet_aton(network))[0]
network_masked = socket.inet_ntoa(
struct.pack('!I', netmask_bits & network_bits)
)
return network, netmask, network_masked
| [
"marvin.cloudstackAPI.updateZone.updateZoneCmd",
"marvin.lib.base.Domain.create",
"marvin.lib.base.Account.create",
"marvin.lib.base.VirtualMachine.list"
] | [((7390, 7461), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'nuagevsp', 'isonw']", 'required_hardware': '"""false"""'}), "(tags=['advanced', 'nuagevsp', 'isonw'], required_hardware='false')\n", (7394, 7461), False, 'from nose.plugins.attrib import attr\n'), ((14323, 14392), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'nuagevsp', 'vpc']", 'required_hardware': '"""false"""'}), "(tags=['advanced', 'nuagevsp', 'vpc'], required_hardware='false')\n", (14327, 14392), False, 'from nose.plugins.attrib import attr\n'), ((25205, 25278), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'nuagevsp', 'domains']", 'required_hardware': '"""false"""'}), "(tags=['advanced', 'nuagevsp', 'domains'], required_hardware='false')\n", (25209, 25278), False, 'from nose.plugins.attrib import attr\n'), ((29287, 29360), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'nuagevsp', 'account']", 'required_hardware': '"""false"""'}), "(tags=['advanced', 'nuagevsp', 'account'], required_hardware='false')\n", (29291, 29360), False, 'from nose.plugins.attrib import attr\n'), ((3636, 3735), 'marvin.lib.base.Account.create', 'Account.create', (['self.api_client', "self.test_data['account']"], {'admin': '(True)', 'domainid': 'self.domain.id'}), "(self.api_client, self.test_data['account'], admin=True,\n domainid=self.domain.id)\n", (3650, 3735), False, 'from marvin.lib.base import Account, Domain, VirtualMachine\n'), ((16415, 16441), 'marvin.cloudstackAPI.updateZone.updateZoneCmd', 'updateZone.updateZoneCmd', ([], {}), '()\n', (16439, 16441), False, 'from marvin.cloudstackAPI import updateZone\n'), ((26082, 26176), 'marvin.lib.base.Domain.create', 'Domain.create', (['self.api_client', '{}'], {'name': '"""DomainManagedbyVsd"""', 'domainid': 'vsd_enterprise.id'}), "(self.api_client, {}, name='DomainManagedbyVsd', domainid=\n vsd_enterprise.id)\n", (26095, 26176), False, 'from marvin.lib.base import Account, Domain, 
VirtualMachine\n'), ((26340, 26450), 'marvin.lib.base.Account.create', 'Account.create', (['self.api_client', "self.test_data['acl']['accountD1']"], {'admin': '(True)', 'domainid': 'acs_domain_1.id'}), "(self.api_client, self.test_data['acl']['accountD1'], admin=\n True, domainid=acs_domain_1.id)\n", (26354, 26450), False, 'from marvin.lib.base import Account, Domain, VirtualMachine\n'), ((30193, 30287), 'marvin.lib.base.Domain.create', 'Domain.create', (['self.api_client', '{}'], {'name': '"""DomainManagedbyVsd"""', 'domainid': 'vsd_enterprise.id'}), "(self.api_client, {}, name='DomainManagedbyVsd', domainid=\n vsd_enterprise.id)\n", (30206, 30287), False, 'from marvin.lib.base import Account, Domain, VirtualMachine\n'), ((30446, 30557), 'marvin.lib.base.Account.create', 'Account.create', (['self.api_client', "self.test_data['acl']['accountD1']"], {'admin': '(False)', 'domainid': 'acs_domain_1.id'}), "(self.api_client, self.test_data['acl']['accountD1'], admin=\n False, domainid=acs_domain_1.id)\n", (30460, 30557), False, 'from marvin.lib.base import Account, Domain, VirtualMachine\n'), ((6425, 6471), 'marvin.lib.base.VirtualMachine.list', 'VirtualMachine.list', (['self.api_client'], {'id': 'vm.id'}), '(self.api_client, id=vm.id)\n', (6444, 6471), False, 'from marvin.lib.base import Account, Domain, VirtualMachine\n'), ((27055, 27155), 'marvin.lib.base.Domain.create', 'Domain.create', (['self.api_client', '{}'], {'name': '"""AnotherDomainManagedbyVsd"""', 'domainid': 'vsd_enterprise.id'}), "(self.api_client, {}, name='AnotherDomainManagedbyVsd',\n domainid=vsd_enterprise.id)\n", (27068, 27155), False, 'from marvin.lib.base import Account, Domain, VirtualMachine\n'), ((27376, 27483), 'marvin.lib.base.Domain.create', 'Domain.create', (['self.api_client', '{}'], {'name': '"""YetAnotherDomainManagedbyVsd"""', 'domainid': '(vsd_enterprise.id + 1)'}), "(self.api_client, {}, name='YetAnotherDomainManagedbyVsd',\n domainid=vsd_enterprise.id + 1)\n", (27389, 27483), False, 
'from marvin.lib.base import Account, Domain, VirtualMachine\n'), ((37066, 37097), 'struct.pack', 'struct.pack', (['"""!I"""', 'netmask_bits'], {}), "('!I', netmask_bits)\n", (37077, 37097), False, 'import struct\n'), ((37227, 37273), 'struct.pack', 'struct.pack', (['"""!I"""', '(netmask_bits & network_bits)'], {}), "('!I', netmask_bits & network_bits)\n", (37238, 37273), False, 'import struct\n'), ((5738, 5751), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (5748, 5751), False, 'import time\n'), ((37142, 37167), 'socket.inet_aton', 'socket.inet_aton', (['network'], {}), '(network)\n', (37158, 37167), False, 'import socket\n')] |
"# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreeme(...TRUNCATED) | ["marvin.lib.common.list_snapshots","marvin.lib.utils.is_snapshot_on_nfs","marvin.lib.common.list_vo(...TRUNCATED) | "[((7341, 7399), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': \"['advanced', 'basic']\", 'requi(...TRUNCATED) |
"# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreeme(...TRUNCATED) | ["marvin.cloudstackAPI.listKubernetesClusters.listKubernetesClustersCmd","marvin.lib.base.StoragePoo(...TRUNCATED) | "[((16124, 16182), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': \"['advanced', 'smoke']\", 'req(...TRUNCATED) |
"# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreeme(...TRUNCATED) | ["marvin.lib.utils.get_process_status","marvin.lib.base.Domain.create","marvin.cloudstackAPI.rebootR(...TRUNCATED) | "[((9746, 9796), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': \"['advanced']\", 'required_hardw(...TRUNCATED) |
"# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreeme(...TRUNCATED) | ["marvin.lib.base.Domain.create","marvin.cloudstackAPI.listConfigurations.listConfigurationsCmd","ma(...TRUNCATED) | "[((7684, 7749), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': \"['advanced', 'basic', 'sg']\", (...TRUNCATED) |
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 5