repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
MAVENSDC/cdflib | cdflib/cdfread.py | CDF.epochrange | def epochrange(self, epoch=None, starttime=None, endtime=None):
"""
Get epoch range.
Returns a list of the record numbers, representing the
corresponding starting and ending records within the time
range from the epoch data. A None is returned if there is no
data either written or found in the time range.
"""
return self.varget(variable=epoch, starttime=starttime,
endtime=endtime, record_range_only=True) | python | def epochrange(self, epoch=None, starttime=None, endtime=None):
"""
Get epoch range.
Returns a list of the record numbers, representing the
corresponding starting and ending records within the time
range from the epoch data. A None is returned if there is no
data either written or found in the time range.
"""
return self.varget(variable=epoch, starttime=starttime,
endtime=endtime, record_range_only=True) | [
"def",
"epochrange",
"(",
"self",
",",
"epoch",
"=",
"None",
",",
"starttime",
"=",
"None",
",",
"endtime",
"=",
"None",
")",
":",
"return",
"self",
".",
"varget",
"(",
"variable",
"=",
"epoch",
",",
"starttime",
"=",
"starttime",
",",
"endtime",
"=",
... | Get epoch range.
Returns a list of the record numbers, representing the
corresponding starting and ending records within the time
range from the epoch data. A None is returned if there is no
data either written or found in the time range. | [
"Get",
"epoch",
"range",
"."
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L540-L550 | train | 208,600 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF.globalattsget | def globalattsget(self, expand=False, to_np=True):
"""
Gets all global attributes.
This function returns all of the global attribute entries,
in a dictionary (in the form of 'attribute': {entry: value}
pair) from a CDF. If there is no entry found, None is
returned. If expand is entered with non-False, then each
entry's data type is also returned in a list form as
[entry, 'CDF_xxxx']. For attributes without any entries,
they will also return with None value.
"""
byte_loc = self._first_adr
return_dict = {}
for _ in range(0, self._num_att):
adr_info = self._read_adr(byte_loc)
if (adr_info['scope'] != 1):
byte_loc = adr_info['next_adr_location']
continue
if (adr_info['num_gr_entry'] == 0):
if (expand is not False):
return_dict[adr_info['name']] = None
byte_loc = adr_info['next_adr_location']
continue
if (expand is False):
entries = []
else:
entries = {}
aedr_byte_loc = adr_info['first_gr_entry']
for _ in range(0, adr_info['num_gr_entry']):
if (self.cdfversion == 3):
aedr_info = self._read_aedr(aedr_byte_loc, to_np=to_np)
else:
aedr_info = self._read_aedr2(aedr_byte_loc, to_np=to_np)
entryData = aedr_info['entry']
if (expand is False):
entries.append(entryData)
else:
entryWithType = []
if (isinstance(entryData, str)):
entryWithType.append(entryData)
else:
dataType = aedr_info['data_type']
if (len(entryData.tolist()) == 1):
if (dataType != 31 and dataType != 32 and dataType != 33):
entryWithType.append(entryData.tolist()[0])
else:
if (dataType != 33):
entryWithType.append(epoch.CDFepoch.encode(entryData.tolist()[0],
iso_8601=False))
else:
entryWithType.append(epoch.CDFepoch.encode(entryData.tolist()[0]))
else:
if (dataType != 31 and dataType != 32 and dataType != 33):
entryWithType.append(entryData.tolist())
else:
if (dataType != 33):
entryWithType.append(epoch.CDFepoch.encode(entryData.tolist(),
iso_8601=False))
else:
entryWithType.append(epoch.CDFepoch.encode(entryData.tolist()))
entryWithType.append(CDF._datatype_token(aedr_info['data_type']))
entries[aedr_info['entry_num']] = entryWithType
aedr_byte_loc = aedr_info['next_aedr']
if (len(entries) != 0):
if (expand is False):
if (len(entries) == 1):
return_dict[adr_info['name']] = entries[0]
else:
return_dict[adr_info['name']] = entries
else:
return_dict[adr_info['name']] = entries
byte_loc = adr_info['next_adr_location']
return return_dict | python | def globalattsget(self, expand=False, to_np=True):
"""
Gets all global attributes.
This function returns all of the global attribute entries,
in a dictionary (in the form of 'attribute': {entry: value}
pair) from a CDF. If there is no entry found, None is
returned. If expand is entered with non-False, then each
entry's data type is also returned in a list form as
[entry, 'CDF_xxxx']. For attributes without any entries,
they will also return with None value.
"""
byte_loc = self._first_adr
return_dict = {}
for _ in range(0, self._num_att):
adr_info = self._read_adr(byte_loc)
if (adr_info['scope'] != 1):
byte_loc = adr_info['next_adr_location']
continue
if (adr_info['num_gr_entry'] == 0):
if (expand is not False):
return_dict[adr_info['name']] = None
byte_loc = adr_info['next_adr_location']
continue
if (expand is False):
entries = []
else:
entries = {}
aedr_byte_loc = adr_info['first_gr_entry']
for _ in range(0, adr_info['num_gr_entry']):
if (self.cdfversion == 3):
aedr_info = self._read_aedr(aedr_byte_loc, to_np=to_np)
else:
aedr_info = self._read_aedr2(aedr_byte_loc, to_np=to_np)
entryData = aedr_info['entry']
if (expand is False):
entries.append(entryData)
else:
entryWithType = []
if (isinstance(entryData, str)):
entryWithType.append(entryData)
else:
dataType = aedr_info['data_type']
if (len(entryData.tolist()) == 1):
if (dataType != 31 and dataType != 32 and dataType != 33):
entryWithType.append(entryData.tolist()[0])
else:
if (dataType != 33):
entryWithType.append(epoch.CDFepoch.encode(entryData.tolist()[0],
iso_8601=False))
else:
entryWithType.append(epoch.CDFepoch.encode(entryData.tolist()[0]))
else:
if (dataType != 31 and dataType != 32 and dataType != 33):
entryWithType.append(entryData.tolist())
else:
if (dataType != 33):
entryWithType.append(epoch.CDFepoch.encode(entryData.tolist(),
iso_8601=False))
else:
entryWithType.append(epoch.CDFepoch.encode(entryData.tolist()))
entryWithType.append(CDF._datatype_token(aedr_info['data_type']))
entries[aedr_info['entry_num']] = entryWithType
aedr_byte_loc = aedr_info['next_aedr']
if (len(entries) != 0):
if (expand is False):
if (len(entries) == 1):
return_dict[adr_info['name']] = entries[0]
else:
return_dict[adr_info['name']] = entries
else:
return_dict[adr_info['name']] = entries
byte_loc = adr_info['next_adr_location']
return return_dict | [
"def",
"globalattsget",
"(",
"self",
",",
"expand",
"=",
"False",
",",
"to_np",
"=",
"True",
")",
":",
"byte_loc",
"=",
"self",
".",
"_first_adr",
"return_dict",
"=",
"{",
"}",
"for",
"_",
"in",
"range",
"(",
"0",
",",
"self",
".",
"_num_att",
")",
... | Gets all global attributes.
This function returns all of the global attribute entries,
in a dictionary (in the form of 'attribute': {entry: value}
pair) from a CDF. If there is no entry found, None is
returned. If expand is entered with non-False, then each
entry's data type is also returned in a list form as
[entry, 'CDF_xxxx']. For attributes without any entries,
they will also return with None value. | [
"Gets",
"all",
"global",
"attributes",
"."
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L552-L628 | train | 208,601 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF.varattsget | def varattsget(self, variable=None, expand=False, to_np=True):
"""
Gets all variable attributes.
Unlike attget, which returns a single attribute entry value,
this function returns all of the variable attribute entries,
in a dictionary (in the form of 'attribute': value pair) for
a variable. If there is no entry found, None is returned.
If no variable name is provided, a list of variables are printed.
If expand is entered with non-False, then each entry's data
type is also returned in a list form as [entry, 'CDF_xxxx'].
For attributes without any entries, they will also return with
None value.
"""
if (isinstance(variable, int) and self._num_zvariable > 0 and self._num_rvariable > 0):
print('This CDF has both r and z variables. Use variable name')
return None
if isinstance(variable, str):
position = self._first_zvariable
num_variables = self._num_zvariable
for zVar in [1, 0]:
for _ in range(0, num_variables):
if (self.cdfversion == 3):
name, vdr_next = self._read_vdr_fast(position)
else:
name, vdr_next = self._read_vdr_fast2(position)
if name.strip().lower() == variable.strip().lower():
if (self.cdfversion == 3):
vdr_info = self._read_vdr(position)
else:
vdr_info = self._read_vdr2(position)
return self._read_varatts(vdr_info['variable_number'], zVar, expand, to_np=to_np)
position = vdr_next
position = self._first_rvariable
num_variables = self._num_rvariable
print('No variable by this name:', variable)
return None
elif isinstance(variable, int):
if self._num_zvariable > 0:
num_variable = self._num_zvariable
zVar = True
else:
num_variable = self._num_rvariable
zVar = False
if (variable < 0 or variable >= num_variable):
print('No variable by this number:', variable)
return None
return self._read_varatts(variable, zVar, expand, to_np=to_np)
else:
print('Please set variable keyword equal to the name or ',
'number of an variable')
rvars, zvars = self._get_varnames()
print("RVARIABLES: ")
for x in rvars:
print("NAME: " + str(x))
print("ZVARIABLES: ")
for x in zvars:
print("NAME: " + str(x))
return | python | def varattsget(self, variable=None, expand=False, to_np=True):
"""
Gets all variable attributes.
Unlike attget, which returns a single attribute entry value,
this function returns all of the variable attribute entries,
in a dictionary (in the form of 'attribute': value pair) for
a variable. If there is no entry found, None is returned.
If no variable name is provided, a list of variables are printed.
If expand is entered with non-False, then each entry's data
type is also returned in a list form as [entry, 'CDF_xxxx'].
For attributes without any entries, they will also return with
None value.
"""
if (isinstance(variable, int) and self._num_zvariable > 0 and self._num_rvariable > 0):
print('This CDF has both r and z variables. Use variable name')
return None
if isinstance(variable, str):
position = self._first_zvariable
num_variables = self._num_zvariable
for zVar in [1, 0]:
for _ in range(0, num_variables):
if (self.cdfversion == 3):
name, vdr_next = self._read_vdr_fast(position)
else:
name, vdr_next = self._read_vdr_fast2(position)
if name.strip().lower() == variable.strip().lower():
if (self.cdfversion == 3):
vdr_info = self._read_vdr(position)
else:
vdr_info = self._read_vdr2(position)
return self._read_varatts(vdr_info['variable_number'], zVar, expand, to_np=to_np)
position = vdr_next
position = self._first_rvariable
num_variables = self._num_rvariable
print('No variable by this name:', variable)
return None
elif isinstance(variable, int):
if self._num_zvariable > 0:
num_variable = self._num_zvariable
zVar = True
else:
num_variable = self._num_rvariable
zVar = False
if (variable < 0 or variable >= num_variable):
print('No variable by this number:', variable)
return None
return self._read_varatts(variable, zVar, expand, to_np=to_np)
else:
print('Please set variable keyword equal to the name or ',
'number of an variable')
rvars, zvars = self._get_varnames()
print("RVARIABLES: ")
for x in rvars:
print("NAME: " + str(x))
print("ZVARIABLES: ")
for x in zvars:
print("NAME: " + str(x))
return | [
"def",
"varattsget",
"(",
"self",
",",
"variable",
"=",
"None",
",",
"expand",
"=",
"False",
",",
"to_np",
"=",
"True",
")",
":",
"if",
"(",
"isinstance",
"(",
"variable",
",",
"int",
")",
"and",
"self",
".",
"_num_zvariable",
">",
"0",
"and",
"self"... | Gets all variable attributes.
Unlike attget, which returns a single attribute entry value,
this function returns all of the variable attribute entries,
in a dictionary (in the form of 'attribute': value pair) for
a variable. If there is no entry found, None is returned.
If no variable name is provided, a list of variables are printed.
If expand is entered with non-False, then each entry's data
type is also returned in a list form as [entry, 'CDF_xxxx'].
For attributes without any entries, they will also return with
None value. | [
"Gets",
"all",
"variable",
"attributes",
"."
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L630-L688 | train | 208,602 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF._uncompress_file | def _uncompress_file(self, path):
'''
Writes the current file into a file in the temporary directory.
If that doesn't work, create a new file in the CDFs directory.
'''
with self.file.open('rb') as f:
if (self.cdfversion == 3):
data_start, data_size, cType, _ = self._read_ccr(8)
else:
data_start, data_size, cType, _ = self._read_ccr2(8)
if cType != 5:
return
f.seek(data_start)
decompressed_data = gzip.decompress(f.read(data_size))
newpath = pathlib.Path(tempfile.NamedTemporaryFile(suffix='.cdf').name)
with newpath.open('wb') as g:
g.write(bytearray.fromhex('cdf30001'))
g.write(bytearray.fromhex('0000ffff'))
g.write(decompressed_data)
return newpath | python | def _uncompress_file(self, path):
'''
Writes the current file into a file in the temporary directory.
If that doesn't work, create a new file in the CDFs directory.
'''
with self.file.open('rb') as f:
if (self.cdfversion == 3):
data_start, data_size, cType, _ = self._read_ccr(8)
else:
data_start, data_size, cType, _ = self._read_ccr2(8)
if cType != 5:
return
f.seek(data_start)
decompressed_data = gzip.decompress(f.read(data_size))
newpath = pathlib.Path(tempfile.NamedTemporaryFile(suffix='.cdf').name)
with newpath.open('wb') as g:
g.write(bytearray.fromhex('cdf30001'))
g.write(bytearray.fromhex('0000ffff'))
g.write(decompressed_data)
return newpath | [
"def",
"_uncompress_file",
"(",
"self",
",",
"path",
")",
":",
"with",
"self",
".",
"file",
".",
"open",
"(",
"'rb'",
")",
"as",
"f",
":",
"if",
"(",
"self",
".",
"cdfversion",
"==",
"3",
")",
":",
"data_start",
",",
"data_size",
",",
"cType",
",",... | Writes the current file into a file in the temporary directory.
If that doesn't work, create a new file in the CDFs directory. | [
"Writes",
"the",
"current",
"file",
"into",
"a",
"file",
"in",
"the",
"temporary",
"directory",
"."
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L690-L714 | train | 208,603 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF._convert_option | def _convert_option(self):
'''
Determines how to convert CDF byte ordering to the system
byte ordering.
'''
if sys.byteorder == 'little' and self._endian() == 'big-endian':
# big->little
order = '>'
elif sys.byteorder == 'big' and self._endian() == 'little-endian':
# little->big
order = '<'
else:
# no conversion
order = '='
return order | python | def _convert_option(self):
'''
Determines how to convert CDF byte ordering to the system
byte ordering.
'''
if sys.byteorder == 'little' and self._endian() == 'big-endian':
# big->little
order = '>'
elif sys.byteorder == 'big' and self._endian() == 'little-endian':
# little->big
order = '<'
else:
# no conversion
order = '='
return order | [
"def",
"_convert_option",
"(",
"self",
")",
":",
"if",
"sys",
".",
"byteorder",
"==",
"'little'",
"and",
"self",
".",
"_endian",
"(",
")",
"==",
"'big-endian'",
":",
"# big->little",
"order",
"=",
"'>'",
"elif",
"sys",
".",
"byteorder",
"==",
"'big'",
"a... | Determines how to convert CDF byte ordering to the system
byte ordering. | [
"Determines",
"how",
"to",
"convert",
"CDF",
"byte",
"ordering",
"to",
"the",
"system",
"byte",
"ordering",
"."
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L1726-L1741 | train | 208,604 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF._num_values | def _num_values(self, vdr_dict):
'''
Returns the number of values in a record, using a given VDR
dictionary. Multiplies the dimension sizes of each dimension,
if it is varying.
'''
values = 1
for x in range(0, vdr_dict['num_dims']):
if (vdr_dict['dim_vary'][x] != 0):
values = values * vdr_dict['dim_sizes'][x]
return values | python | def _num_values(self, vdr_dict):
'''
Returns the number of values in a record, using a given VDR
dictionary. Multiplies the dimension sizes of each dimension,
if it is varying.
'''
values = 1
for x in range(0, vdr_dict['num_dims']):
if (vdr_dict['dim_vary'][x] != 0):
values = values * vdr_dict['dim_sizes'][x]
return values | [
"def",
"_num_values",
"(",
"self",
",",
"vdr_dict",
")",
":",
"values",
"=",
"1",
"for",
"x",
"in",
"range",
"(",
"0",
",",
"vdr_dict",
"[",
"'num_dims'",
"]",
")",
":",
"if",
"(",
"vdr_dict",
"[",
"'dim_vary'",
"]",
"[",
"x",
"]",
"!=",
"0",
")"... | Returns the number of values in a record, using a given VDR
dictionary. Multiplies the dimension sizes of each dimension,
if it is varying. | [
"Returns",
"the",
"number",
"of",
"values",
"in",
"a",
"record",
"using",
"a",
"given",
"VDR",
"dictionary",
".",
"Multiplies",
"the",
"dimension",
"sizes",
"of",
"each",
"dimension",
"if",
"it",
"is",
"varying",
"."
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L1913-L1923 | train | 208,605 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF._convert_type | def _convert_type(self, data_type):
'''
CDF data types to python struct data types
'''
if (data_type == 1) or (data_type == 41):
dt_string = 'b'
elif data_type == 2:
dt_string = 'h'
elif data_type == 4:
dt_string = 'i'
elif (data_type == 8) or (data_type == 33):
dt_string = 'q'
elif data_type == 11:
dt_string = 'B'
elif data_type == 12:
dt_string = 'H'
elif data_type == 14:
dt_string = 'I'
elif (data_type == 21) or (data_type == 44):
dt_string = 'f'
elif (data_type == 22) or (data_type == 45) or (data_type == 31):
dt_string = 'd'
elif (data_type == 32):
dt_string = 'd'
elif (data_type == 51) or (data_type == 52):
dt_string = 's'
return dt_string | python | def _convert_type(self, data_type):
'''
CDF data types to python struct data types
'''
if (data_type == 1) or (data_type == 41):
dt_string = 'b'
elif data_type == 2:
dt_string = 'h'
elif data_type == 4:
dt_string = 'i'
elif (data_type == 8) or (data_type == 33):
dt_string = 'q'
elif data_type == 11:
dt_string = 'B'
elif data_type == 12:
dt_string = 'H'
elif data_type == 14:
dt_string = 'I'
elif (data_type == 21) or (data_type == 44):
dt_string = 'f'
elif (data_type == 22) or (data_type == 45) or (data_type == 31):
dt_string = 'd'
elif (data_type == 32):
dt_string = 'd'
elif (data_type == 51) or (data_type == 52):
dt_string = 's'
return dt_string | [
"def",
"_convert_type",
"(",
"self",
",",
"data_type",
")",
":",
"if",
"(",
"data_type",
"==",
"1",
")",
"or",
"(",
"data_type",
"==",
"41",
")",
":",
"dt_string",
"=",
"'b'",
"elif",
"data_type",
"==",
"2",
":",
"dt_string",
"=",
"'h'",
"elif",
"dat... | CDF data types to python struct data types | [
"CDF",
"data",
"types",
"to",
"python",
"struct",
"data",
"types"
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L2072-L2098 | train | 208,606 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF._default_pad | def _default_pad(self, data_type, num_elms): # @NoSelf
'''
The default pad values by CDF data type
'''
order = self._convert_option()
if (data_type == 51 or data_type == 52):
return str(' '*num_elms)
if (data_type == 1) or (data_type == 41):
pad_value = struct.pack(order+'b', -127)
dt_string = 'i1'
elif data_type == 2:
pad_value = struct.pack(order+'h', -32767)
dt_string = 'i2'
elif data_type == 4:
pad_value = struct.pack(order+'i', -2147483647)
dt_string = 'i4'
elif (data_type == 8) or (data_type == 33):
pad_value = struct.pack(order+'q', -9223372036854775807)
dt_string = 'i8'
elif data_type == 11:
pad_value = struct.pack(order+'B', 254)
dt_string = 'u1'
elif data_type == 12:
pad_value = struct.pack(order+'H', 65534)
dt_string = 'u2'
elif data_type == 14:
pad_value = struct.pack(order+'I', 4294967294)
dt_string = 'u4'
elif (data_type == 21) or (data_type == 44):
pad_value = struct.pack(order+'f', -1.0E30)
dt_string = 'f'
elif (data_type == 22) or (data_type == 45) or (data_type == 31):
pad_value = struct.pack(order+'d', -1.0E30)
dt_string = 'd'
else:
# (data_type == 32):
pad_value = struct.pack(order+'2d', *[-1.0E30, -1.0E30])
dt_string = 'c16'
dt = np.dtype(dt_string)
ret = np.frombuffer(pad_value, dtype=dt, count=1)
ret.setflags('WRITEABLE')
return ret | python | def _default_pad(self, data_type, num_elms): # @NoSelf
'''
The default pad values by CDF data type
'''
order = self._convert_option()
if (data_type == 51 or data_type == 52):
return str(' '*num_elms)
if (data_type == 1) or (data_type == 41):
pad_value = struct.pack(order+'b', -127)
dt_string = 'i1'
elif data_type == 2:
pad_value = struct.pack(order+'h', -32767)
dt_string = 'i2'
elif data_type == 4:
pad_value = struct.pack(order+'i', -2147483647)
dt_string = 'i4'
elif (data_type == 8) or (data_type == 33):
pad_value = struct.pack(order+'q', -9223372036854775807)
dt_string = 'i8'
elif data_type == 11:
pad_value = struct.pack(order+'B', 254)
dt_string = 'u1'
elif data_type == 12:
pad_value = struct.pack(order+'H', 65534)
dt_string = 'u2'
elif data_type == 14:
pad_value = struct.pack(order+'I', 4294967294)
dt_string = 'u4'
elif (data_type == 21) or (data_type == 44):
pad_value = struct.pack(order+'f', -1.0E30)
dt_string = 'f'
elif (data_type == 22) or (data_type == 45) or (data_type == 31):
pad_value = struct.pack(order+'d', -1.0E30)
dt_string = 'd'
else:
# (data_type == 32):
pad_value = struct.pack(order+'2d', *[-1.0E30, -1.0E30])
dt_string = 'c16'
dt = np.dtype(dt_string)
ret = np.frombuffer(pad_value, dtype=dt, count=1)
ret.setflags('WRITEABLE')
return ret | [
"def",
"_default_pad",
"(",
"self",
",",
"data_type",
",",
"num_elms",
")",
":",
"# @NoSelf",
"order",
"=",
"self",
".",
"_convert_option",
"(",
")",
"if",
"(",
"data_type",
"==",
"51",
"or",
"data_type",
"==",
"52",
")",
":",
"return",
"str",
"(",
"' ... | The default pad values by CDF data type | [
"The",
"default",
"pad",
"values",
"by",
"CDF",
"data",
"type"
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L2100-L2142 | train | 208,607 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF._convert_np_data | def _convert_np_data(data, data_type, num_elems): # @NoSelf
'''
Converts a single np data into byte stream.
'''
if (data_type == 51 or data_type == 52):
if (data == ''):
return ('\x00'*num_elems).encode()
else:
return data.ljust(num_elems, '\x00').encode('utf-8')
elif (data_type == 32):
data_stream = data.real.tobytes()
data_stream += data.imag.tobytes()
return data_stream
else:
return data.tobytes() | python | def _convert_np_data(data, data_type, num_elems): # @NoSelf
'''
Converts a single np data into byte stream.
'''
if (data_type == 51 or data_type == 52):
if (data == ''):
return ('\x00'*num_elems).encode()
else:
return data.ljust(num_elems, '\x00').encode('utf-8')
elif (data_type == 32):
data_stream = data.real.tobytes()
data_stream += data.imag.tobytes()
return data_stream
else:
return data.tobytes() | [
"def",
"_convert_np_data",
"(",
"data",
",",
"data_type",
",",
"num_elems",
")",
":",
"# @NoSelf",
"if",
"(",
"data_type",
"==",
"51",
"or",
"data_type",
"==",
"52",
")",
":",
"if",
"(",
"data",
"==",
"''",
")",
":",
"return",
"(",
"'\\x00'",
"*",
"n... | Converts a single np data into byte stream. | [
"Converts",
"a",
"single",
"np",
"data",
"into",
"byte",
"stream",
"."
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L2144-L2158 | train | 208,608 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF._read_vvr_block | def _read_vvr_block(self, offset):
'''
Returns a VVR or decompressed CVVR block
'''
with self.file.open('rb') as f:
f.seek(offset, 0)
block_size = int.from_bytes(f.read(8), 'big')
block = f.read(block_size-8)
section_type = int.from_bytes(block[0:4], 'big')
if section_type == 13:
# a CVVR
compressed_size = int.from_bytes(block[12:16], 'big')
return gzip.decompress(block[16:16+compressed_size])
elif section_type == 7:
# a VVR
return block[4:] | python | def _read_vvr_block(self, offset):
'''
Returns a VVR or decompressed CVVR block
'''
with self.file.open('rb') as f:
f.seek(offset, 0)
block_size = int.from_bytes(f.read(8), 'big')
block = f.read(block_size-8)
section_type = int.from_bytes(block[0:4], 'big')
if section_type == 13:
# a CVVR
compressed_size = int.from_bytes(block[12:16], 'big')
return gzip.decompress(block[16:16+compressed_size])
elif section_type == 7:
# a VVR
return block[4:] | [
"def",
"_read_vvr_block",
"(",
"self",
",",
"offset",
")",
":",
"with",
"self",
".",
"file",
".",
"open",
"(",
"'rb'",
")",
"as",
"f",
":",
"f",
".",
"seek",
"(",
"offset",
",",
"0",
")",
"block_size",
"=",
"int",
".",
"from_bytes",
"(",
"f",
"."... | Returns a VVR or decompressed CVVR block | [
"Returns",
"a",
"VVR",
"or",
"decompressed",
"CVVR",
"block"
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L2160-L2176 | train | 208,609 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF._find_block | def _find_block(starts, ends, cur_block, rec_num): # @NoSelf
'''
Finds the block that rec_num is in if it is found. Otherwise it returns -1.
It also returns the block that has the physical data either at or
preceeding the rec_num.
It could be -1 if the preceeding block does not exists.
'''
total = len(starts)
if (cur_block == -1):
cur_block = 0
for x in range(cur_block, total):
if (starts[x] <= rec_num and ends[x] >= rec_num):
return x, x
if (starts[x] > rec_num):
break
return -1, x-1 | python | def _find_block(starts, ends, cur_block, rec_num): # @NoSelf
'''
Finds the block that rec_num is in if it is found. Otherwise it returns -1.
It also returns the block that has the physical data either at or
preceeding the rec_num.
It could be -1 if the preceeding block does not exists.
'''
total = len(starts)
if (cur_block == -1):
cur_block = 0
for x in range(cur_block, total):
if (starts[x] <= rec_num and ends[x] >= rec_num):
return x, x
if (starts[x] > rec_num):
break
return -1, x-1 | [
"def",
"_find_block",
"(",
"starts",
",",
"ends",
",",
"cur_block",
",",
"rec_num",
")",
":",
"# @NoSelf",
"total",
"=",
"len",
"(",
"starts",
")",
"if",
"(",
"cur_block",
"==",
"-",
"1",
")",
":",
"cur_block",
"=",
"0",
"for",
"x",
"in",
"range",
... | Finds the block that rec_num is in if it is found. Otherwise it returns -1.
It also returns the block that has the physical data either at or
preceeding the rec_num.
It could be -1 if the preceeding block does not exists. | [
"Finds",
"the",
"block",
"that",
"rec_num",
"is",
"in",
"if",
"it",
"is",
"found",
".",
"Otherwise",
"it",
"returns",
"-",
"1",
".",
"It",
"also",
"returns",
"the",
"block",
"that",
"has",
"the",
"physical",
"data",
"either",
"at",
"or",
"preceeding",
... | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L2196-L2211 | train | 208,610 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF._convert_data | def _convert_data(self, data, data_type, num_recs, num_values, num_elems):
'''
Converts data to the appropriate type using the struct.unpack method,
rather than using numpy.
'''
if (data_type == 51 or data_type == 52):
return [data[i:i+num_elems].decode('utf-8') for i in
range(0, num_recs*num_values*num_elems, num_elems)]
else:
tofrom = self._convert_option()
dt_string = self._convert_type(data_type)
form = tofrom + str(num_recs*num_values*num_elems) + dt_string
value_len = CDF._type_size(data_type, num_elems)
return list(struct.unpack_from(form,
data[0:num_recs*num_values*value_len])) | python | def _convert_data(self, data, data_type, num_recs, num_values, num_elems):
'''
Converts data to the appropriate type using the struct.unpack method,
rather than using numpy.
'''
if (data_type == 51 or data_type == 52):
return [data[i:i+num_elems].decode('utf-8') for i in
range(0, num_recs*num_values*num_elems, num_elems)]
else:
tofrom = self._convert_option()
dt_string = self._convert_type(data_type)
form = tofrom + str(num_recs*num_values*num_elems) + dt_string
value_len = CDF._type_size(data_type, num_elems)
return list(struct.unpack_from(form,
data[0:num_recs*num_values*value_len])) | [
"def",
"_convert_data",
"(",
"self",
",",
"data",
",",
"data_type",
",",
"num_recs",
",",
"num_values",
",",
"num_elems",
")",
":",
"if",
"(",
"data_type",
"==",
"51",
"or",
"data_type",
"==",
"52",
")",
":",
"return",
"[",
"data",
"[",
"i",
":",
"i"... | Converts data to the appropriate type using the struct.unpack method,
rather than using numpy. | [
"Converts",
"data",
"to",
"the",
"appropriate",
"type",
"using",
"the",
"struct",
".",
"unpack",
"method",
"rather",
"than",
"using",
"numpy",
"."
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L2213-L2228 | train | 208,611 |
MAVENSDC/cdflib | cdflib/cdfread.py | CDF.getVersion | def getVersion(): # @NoSelf
"""
Shows the code version and last modified date.
"""
print('CDFread version:', str(CDF.version) + '.' + str(CDF.release) +
'.' + str(CDF.increment))
print('Date: 2018/01/11') | python | def getVersion(): # @NoSelf
"""
Shows the code version and last modified date.
"""
print('CDFread version:', str(CDF.version) + '.' + str(CDF.release) +
'.' + str(CDF.increment))
print('Date: 2018/01/11') | [
"def",
"getVersion",
"(",
")",
":",
"# @NoSelf",
"print",
"(",
"'CDFread version:'",
",",
"str",
"(",
"CDF",
".",
"version",
")",
"+",
"'.'",
"+",
"str",
"(",
"CDF",
".",
"release",
")",
"+",
"'.'",
"+",
"str",
"(",
"CDF",
".",
"increment",
")",
")... | Shows the code version and last modified date. | [
"Shows",
"the",
"code",
"version",
"and",
"last",
"modified",
"date",
"."
] | d237c60e5db67db0f92d96054209c25c4042465c | https://github.com/MAVENSDC/cdflib/blob/d237c60e5db67db0f92d96054209c25c4042465c/cdflib/cdfread.py#L2230-L2236 | train | 208,612 |
terryyin/translate-python | translate/providers/microsoft.py | AzureAuthClient.get_access_token | def get_access_token(self):
'''
Returns an access token for the specified subscription.
This method uses a cache to limit the number of requests to the token service.
A fresh token can be re-used during its lifetime of 10 minutes. After a successful
request to the token service, this method caches the access token. Subsequent
invocations of the method return the cached token for the next 5 minutes. After
5 minutes, a new token is fetched from the token service and the cache is updated.
'''
if (self.token is None) or (datetime.utcnow() > self.reuse_token_until):
headers = {'Ocp-Apim-Subscription-Key': self.client_secret}
response = requests.post(self.base_url, headers=headers)
response.raise_for_status()
self.token = response.content
self.reuse_token_until = datetime.utcnow() + timedelta(minutes=5)
return self.token.decode('utf-8') | python | def get_access_token(self):
'''
Returns an access token for the specified subscription.
This method uses a cache to limit the number of requests to the token service.
A fresh token can be re-used during its lifetime of 10 minutes. After a successful
request to the token service, this method caches the access token. Subsequent
invocations of the method return the cached token for the next 5 minutes. After
5 minutes, a new token is fetched from the token service and the cache is updated.
'''
if (self.token is None) or (datetime.utcnow() > self.reuse_token_until):
headers = {'Ocp-Apim-Subscription-Key': self.client_secret}
response = requests.post(self.base_url, headers=headers)
response.raise_for_status()
self.token = response.content
self.reuse_token_until = datetime.utcnow() + timedelta(minutes=5)
return self.token.decode('utf-8') | [
"def",
"get_access_token",
"(",
"self",
")",
":",
"if",
"(",
"self",
".",
"token",
"is",
"None",
")",
"or",
"(",
"datetime",
".",
"utcnow",
"(",
")",
">",
"self",
".",
"reuse_token_until",
")",
":",
"headers",
"=",
"{",
"'Ocp-Apim-Subscription-Key'",
":"... | Returns an access token for the specified subscription.
This method uses a cache to limit the number of requests to the token service.
A fresh token can be re-used during its lifetime of 10 minutes. After a successful
request to the token service, this method caches the access token. Subsequent
invocations of the method return the cached token for the next 5 minutes. After
5 minutes, a new token is fetched from the token service and the cache is updated. | [
"Returns",
"an",
"access",
"token",
"for",
"the",
"specified",
"subscription",
"."
] | 20b8e892a97ab02c0708269c236ad1f12083a2d8 | https://github.com/terryyin/translate-python/blob/20b8e892a97ab02c0708269c236ad1f12083a2d8/translate/providers/microsoft.py#L29-L48 | train | 208,613 |
terryyin/translate-python | translate/main.py | main | def main(from_lang, to_lang, provider, secret_access_key, output_only, text):
"""
Python command line tool to make on line translations
\b
Example:
\b
\t $ translate-cli -t zh the book is on the table
\t 碗是在桌子上。
\b
Available languages:
\b
\t https://en.wikipedia.org/wiki/ISO_639-1
\t Examples: (e.g. en, ja, ko, pt, zh, zh-TW, ...)
"""
text = ' '.join(text)
kwargs = dict(from_lang=from_lang, to_lang=to_lang, provider=provider)
if provider != DEFAULT_PROVIDER:
kwargs['secret_access_key'] = secret_access_key
translator = Translator(**kwargs)
translation = translator.translate(text)
if sys.version_info.major == 2:
translation = translation.encode(locale.getpreferredencoding())
if output_only:
click.echo(translation)
return translation
click.echo('\nTranslation: {}'.format(translation))
click.echo('-' * 25)
click.echo('Translated by: {}'.format(translator.provider.name))
return translation | python | def main(from_lang, to_lang, provider, secret_access_key, output_only, text):
"""
Python command line tool to make on line translations
\b
Example:
\b
\t $ translate-cli -t zh the book is on the table
\t 碗是在桌子上。
\b
Available languages:
\b
\t https://en.wikipedia.org/wiki/ISO_639-1
\t Examples: (e.g. en, ja, ko, pt, zh, zh-TW, ...)
"""
text = ' '.join(text)
kwargs = dict(from_lang=from_lang, to_lang=to_lang, provider=provider)
if provider != DEFAULT_PROVIDER:
kwargs['secret_access_key'] = secret_access_key
translator = Translator(**kwargs)
translation = translator.translate(text)
if sys.version_info.major == 2:
translation = translation.encode(locale.getpreferredencoding())
if output_only:
click.echo(translation)
return translation
click.echo('\nTranslation: {}'.format(translation))
click.echo('-' * 25)
click.echo('Translated by: {}'.format(translator.provider.name))
return translation | [
"def",
"main",
"(",
"from_lang",
",",
"to_lang",
",",
"provider",
",",
"secret_access_key",
",",
"output_only",
",",
"text",
")",
":",
"text",
"=",
"' '",
".",
"join",
"(",
"text",
")",
"kwargs",
"=",
"dict",
"(",
"from_lang",
"=",
"from_lang",
",",
"t... | Python command line tool to make on line translations
\b
Example:
\b
\t $ translate-cli -t zh the book is on the table
\t 碗是在桌子上。
\b
Available languages:
\b
\t https://en.wikipedia.org/wiki/ISO_639-1
\t Examples: (e.g. en, ja, ko, pt, zh, zh-TW, ...) | [
"Python",
"command",
"line",
"tool",
"to",
"make",
"on",
"line",
"translations"
] | 20b8e892a97ab02c0708269c236ad1f12083a2d8 | https://github.com/terryyin/translate-python/blob/20b8e892a97ab02c0708269c236ad1f12083a2d8/translate/main.py#L137-L172 | train | 208,614 |
SatelliteQE/nailgun | docs/create_user_plain.py | get_organization_id | def get_organization_id(server_config, label):
"""Return the ID of the organization with label ``label``.
:param server_config: A dict of information about the server being talked
to. The dict should include the keys "url", "auth" and "verify".
:param label: A string label that will be used when searching. Every
organization should have a unique label.
:returns: An organization ID. (Typically an integer.)
"""
response = requests.get(
server_config['url'] + '/katello/api/v2/organizations',
data=json.dumps({'search': 'label={}'.format(label)}),
auth=server_config['auth'],
headers={'content-type': 'application/json'},
verify=server_config['verify'],
)
response.raise_for_status()
decoded = response.json()
if decoded['subtotal'] != 1:
print(
'Expected to find one organization, but instead found {0}. Search '
'results: {1}'.format(decoded['subtotal'], decoded['results'])
)
exit(1)
return decoded['results'][0]['id'] | python | def get_organization_id(server_config, label):
"""Return the ID of the organization with label ``label``.
:param server_config: A dict of information about the server being talked
to. The dict should include the keys "url", "auth" and "verify".
:param label: A string label that will be used when searching. Every
organization should have a unique label.
:returns: An organization ID. (Typically an integer.)
"""
response = requests.get(
server_config['url'] + '/katello/api/v2/organizations',
data=json.dumps({'search': 'label={}'.format(label)}),
auth=server_config['auth'],
headers={'content-type': 'application/json'},
verify=server_config['verify'],
)
response.raise_for_status()
decoded = response.json()
if decoded['subtotal'] != 1:
print(
'Expected to find one organization, but instead found {0}. Search '
'results: {1}'.format(decoded['subtotal'], decoded['results'])
)
exit(1)
return decoded['results'][0]['id'] | [
"def",
"get_organization_id",
"(",
"server_config",
",",
"label",
")",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"server_config",
"[",
"'url'",
"]",
"+",
"'/katello/api/v2/organizations'",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"{",
"'search'",
... | Return the ID of the organization with label ``label``.
:param server_config: A dict of information about the server being talked
to. The dict should include the keys "url", "auth" and "verify".
:param label: A string label that will be used when searching. Every
organization should have a unique label.
:returns: An organization ID. (Typically an integer.) | [
"Return",
"the",
"ID",
"of",
"the",
"organization",
"with",
"label",
"label",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/docs/create_user_plain.py#L50-L75 | train | 208,615 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | _make_entity_from_id | def _make_entity_from_id(entity_cls, entity_obj_or_id, server_config):
"""Given an entity object or an ID, return an entity object.
If the value passed in is an object that is a subclass of :class:`Entity`,
return that value. Otherwise, create an object of the type that ``field``
references, give that object an ID of ``field_value``, and return that
object.
:param entity_cls: An :class:`Entity` subclass.
:param entity_obj_or_id: Either a :class:`nailgun.entity_mixins.Entity`
object or an entity ID.
:returns: An ``entity_cls`` object.
:rtype: nailgun.entity_mixins.Entity
"""
if isinstance(entity_obj_or_id, entity_cls):
return entity_obj_or_id
return entity_cls(server_config, id=entity_obj_or_id) | python | def _make_entity_from_id(entity_cls, entity_obj_or_id, server_config):
"""Given an entity object or an ID, return an entity object.
If the value passed in is an object that is a subclass of :class:`Entity`,
return that value. Otherwise, create an object of the type that ``field``
references, give that object an ID of ``field_value``, and return that
object.
:param entity_cls: An :class:`Entity` subclass.
:param entity_obj_or_id: Either a :class:`nailgun.entity_mixins.Entity`
object or an entity ID.
:returns: An ``entity_cls`` object.
:rtype: nailgun.entity_mixins.Entity
"""
if isinstance(entity_obj_or_id, entity_cls):
return entity_obj_or_id
return entity_cls(server_config, id=entity_obj_or_id) | [
"def",
"_make_entity_from_id",
"(",
"entity_cls",
",",
"entity_obj_or_id",
",",
"server_config",
")",
":",
"if",
"isinstance",
"(",
"entity_obj_or_id",
",",
"entity_cls",
")",
":",
"return",
"entity_obj_or_id",
"return",
"entity_cls",
"(",
"server_config",
",",
"id"... | Given an entity object or an ID, return an entity object.
If the value passed in is an object that is a subclass of :class:`Entity`,
return that value. Otherwise, create an object of the type that ``field``
references, give that object an ID of ``field_value``, and return that
object.
:param entity_cls: An :class:`Entity` subclass.
:param entity_obj_or_id: Either a :class:`nailgun.entity_mixins.Entity`
object or an entity ID.
:returns: An ``entity_cls`` object.
:rtype: nailgun.entity_mixins.Entity | [
"Given",
"an",
"entity",
"object",
"or",
"an",
"ID",
"return",
"an",
"entity",
"object",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L120-L137 | train | 208,616 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | _get_entity_id | def _get_entity_id(field_name, attrs):
"""Find the ID for a one to one relationship.
The server may return JSON data in the following forms for a
:class:`nailgun.entity_fields.OneToOneField`::
'user': None
'user': {'name': 'Alice Hayes', 'login': 'ahayes', 'id': 1}
'user_id': 1
'user_id': None
Search ``attrs`` for a one to one ``field_name`` and return its ID.
:param field_name: A string. The name of a field.
:param attrs: A dict. A JSON payload as returned from a server.
:returns: Either an entity ID or None.
"""
field_name_id = field_name + '_id'
if field_name in attrs:
if attrs[field_name] is None:
return None
elif 'id' in attrs[field_name]:
return attrs[field_name]['id']
if field_name_id in attrs:
return attrs[field_name_id]
else:
raise MissingValueError(
'Cannot find a value for the "{0}" field. Searched for keys named '
'{1}, but available keys are {2}.'
.format(field_name, (field_name, field_name_id), attrs.keys())
) | python | def _get_entity_id(field_name, attrs):
"""Find the ID for a one to one relationship.
The server may return JSON data in the following forms for a
:class:`nailgun.entity_fields.OneToOneField`::
'user': None
'user': {'name': 'Alice Hayes', 'login': 'ahayes', 'id': 1}
'user_id': 1
'user_id': None
Search ``attrs`` for a one to one ``field_name`` and return its ID.
:param field_name: A string. The name of a field.
:param attrs: A dict. A JSON payload as returned from a server.
:returns: Either an entity ID or None.
"""
field_name_id = field_name + '_id'
if field_name in attrs:
if attrs[field_name] is None:
return None
elif 'id' in attrs[field_name]:
return attrs[field_name]['id']
if field_name_id in attrs:
return attrs[field_name_id]
else:
raise MissingValueError(
'Cannot find a value for the "{0}" field. Searched for keys named '
'{1}, but available keys are {2}.'
.format(field_name, (field_name, field_name_id), attrs.keys())
) | [
"def",
"_get_entity_id",
"(",
"field_name",
",",
"attrs",
")",
":",
"field_name_id",
"=",
"field_name",
"+",
"'_id'",
"if",
"field_name",
"in",
"attrs",
":",
"if",
"attrs",
"[",
"field_name",
"]",
"is",
"None",
":",
"return",
"None",
"elif",
"'id'",
"in",
... | Find the ID for a one to one relationship.
The server may return JSON data in the following forms for a
:class:`nailgun.entity_fields.OneToOneField`::
'user': None
'user': {'name': 'Alice Hayes', 'login': 'ahayes', 'id': 1}
'user_id': 1
'user_id': None
Search ``attrs`` for a one to one ``field_name`` and return its ID.
:param field_name: A string. The name of a field.
:param attrs: A dict. A JSON payload as returned from a server.
:returns: Either an entity ID or None. | [
"Find",
"the",
"ID",
"for",
"a",
"one",
"to",
"one",
"relationship",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L226-L257 | train | 208,617 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | _get_entity_ids | def _get_entity_ids(field_name, attrs):
"""Find the IDs for a one to many relationship.
The server may return JSON data in the following forms for a
:class:`nailgun.entity_fields.OneToManyField`::
'user': [{'id': 1, …}, {'id': 42, …}]
'users': [{'id': 1, …}, {'id': 42, …}]
'user_ids': [1, 42]
Search ``attrs`` for a one to many ``field_name`` and return its ID.
:param field_name: A string. The name of a field.
:param attrs: A dict. A JSON payload as returned from a server.
:returns: An iterable of entity IDs.
"""
field_name_ids = field_name + '_ids'
plural_field_name = pluralize(field_name)
if field_name_ids in attrs:
return attrs[field_name_ids]
elif field_name in attrs:
return [entity['id'] for entity in attrs[field_name]]
elif plural_field_name in attrs:
return [entity['id'] for entity in attrs[plural_field_name]]
else:
raise MissingValueError(
'Cannot find a value for the "{0}" field. Searched for keys named '
'{1}, but available keys are {2}.'
.format(
field_name,
(field_name_ids, field_name, plural_field_name),
attrs.keys()
)
) | python | def _get_entity_ids(field_name, attrs):
"""Find the IDs for a one to many relationship.
The server may return JSON data in the following forms for a
:class:`nailgun.entity_fields.OneToManyField`::
'user': [{'id': 1, …}, {'id': 42, …}]
'users': [{'id': 1, …}, {'id': 42, …}]
'user_ids': [1, 42]
Search ``attrs`` for a one to many ``field_name`` and return its ID.
:param field_name: A string. The name of a field.
:param attrs: A dict. A JSON payload as returned from a server.
:returns: An iterable of entity IDs.
"""
field_name_ids = field_name + '_ids'
plural_field_name = pluralize(field_name)
if field_name_ids in attrs:
return attrs[field_name_ids]
elif field_name in attrs:
return [entity['id'] for entity in attrs[field_name]]
elif plural_field_name in attrs:
return [entity['id'] for entity in attrs[plural_field_name]]
else:
raise MissingValueError(
'Cannot find a value for the "{0}" field. Searched for keys named '
'{1}, but available keys are {2}.'
.format(
field_name,
(field_name_ids, field_name, plural_field_name),
attrs.keys()
)
) | [
"def",
"_get_entity_ids",
"(",
"field_name",
",",
"attrs",
")",
":",
"field_name_ids",
"=",
"field_name",
"+",
"'_ids'",
"plural_field_name",
"=",
"pluralize",
"(",
"field_name",
")",
"if",
"field_name_ids",
"in",
"attrs",
":",
"return",
"attrs",
"[",
"field_nam... | Find the IDs for a one to many relationship.
The server may return JSON data in the following forms for a
:class:`nailgun.entity_fields.OneToManyField`::
'user': [{'id': 1, …}, {'id': 42, …}]
'users': [{'id': 1, …}, {'id': 42, …}]
'user_ids': [1, 42]
Search ``attrs`` for a one to many ``field_name`` and return its ID.
:param field_name: A string. The name of a field.
:param attrs: A dict. A JSON payload as returned from a server.
:returns: An iterable of entity IDs. | [
"Find",
"the",
"IDs",
"for",
"a",
"one",
"to",
"many",
"relationship",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L260-L294 | train | 208,618 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | to_json_serializable | def to_json_serializable(obj):
""" Transforms obj into a json serializable object.
:param obj: entity or any json serializable object
:return: serializable object
"""
if isinstance(obj, Entity):
return obj.to_json_dict()
if isinstance(obj, dict):
return {k: to_json_serializable(v) for k, v in obj.items()}
elif isinstance(obj, (list, tuple)):
return [to_json_serializable(v) for v in obj]
elif isinstance(obj, datetime):
return obj.strftime('%Y-%m-%d %H:%M:%S')
elif isinstance(obj, date):
return obj.strftime('%Y-%m-%d')
return obj | python | def to_json_serializable(obj):
""" Transforms obj into a json serializable object.
:param obj: entity or any json serializable object
:return: serializable object
"""
if isinstance(obj, Entity):
return obj.to_json_dict()
if isinstance(obj, dict):
return {k: to_json_serializable(v) for k, v in obj.items()}
elif isinstance(obj, (list, tuple)):
return [to_json_serializable(v) for v in obj]
elif isinstance(obj, datetime):
return obj.strftime('%Y-%m-%d %H:%M:%S')
elif isinstance(obj, date):
return obj.strftime('%Y-%m-%d')
return obj | [
"def",
"to_json_serializable",
"(",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"Entity",
")",
":",
"return",
"obj",
".",
"to_json_dict",
"(",
")",
"if",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"return",
"{",
"k",
":",
"to_json_seria... | Transforms obj into a json serializable object.
:param obj: entity or any json serializable object
:return: serializable object | [
"Transforms",
"obj",
"into",
"a",
"json",
"serializable",
"object",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L1427-L1447 | train | 208,619 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | Entity.path | def path(self, which=None):
"""Return the path to the current entity.
Return the path to base entities of this entity's type if:
* ``which`` is ``'base'``, or
* ``which`` is ``None`` and instance attribute ``id`` is unset.
Return the path to this exact entity if instance attribute ``id`` is
set and:
* ``which`` is ``'self'``, or
* ``which`` is ``None``.
Raise :class:`NoSuchPathError` otherwise.
Child classes may choose to extend this method, especially if a child
entity offers more than the two URLs supported by default. If extended,
then the extending class should check for custom parameters before
calling ``super``::
def path(self, which):
if which == 'custom':
return urljoin(…)
super(ChildEntity, self).__init__(which)
This will allow the extending method to accept a custom parameter
without accidentally raising a :class:`NoSuchPathError`.
:param which: A string. Optional. Valid arguments are 'self' and
'base'.
:return: A string. A fully qualified URL.
:raises nailgun.entity_mixins.NoSuchPathError: If no path can be built.
"""
# It is OK that member ``self._meta`` is not found. Subclasses are
# required to set that attribute if they wish to use this method.
#
# Beware of leading and trailing slashes:
#
# urljoin('example.com', 'foo') => 'foo'
# urljoin('example.com/', 'foo') => 'example.com/foo'
# urljoin('example.com', '/foo') => '/foo'
# urljoin('example.com/', '/foo') => '/foo'
#
base = urljoin(
self._server_config.url + '/',
self._meta['api_path'] # pylint:disable=no-member
)
if which == 'base' or (which is None and not hasattr(self, 'id')):
return base
elif (which == 'self' or which is None) and hasattr(self, 'id'):
return urljoin(base + '/', str(self.id)) # pylint:disable=E1101
raise NoSuchPathError | python | def path(self, which=None):
"""Return the path to the current entity.
Return the path to base entities of this entity's type if:
* ``which`` is ``'base'``, or
* ``which`` is ``None`` and instance attribute ``id`` is unset.
Return the path to this exact entity if instance attribute ``id`` is
set and:
* ``which`` is ``'self'``, or
* ``which`` is ``None``.
Raise :class:`NoSuchPathError` otherwise.
Child classes may choose to extend this method, especially if a child
entity offers more than the two URLs supported by default. If extended,
then the extending class should check for custom parameters before
calling ``super``::
def path(self, which):
if which == 'custom':
return urljoin(…)
super(ChildEntity, self).__init__(which)
This will allow the extending method to accept a custom parameter
without accidentally raising a :class:`NoSuchPathError`.
:param which: A string. Optional. Valid arguments are 'self' and
'base'.
:return: A string. A fully qualified URL.
:raises nailgun.entity_mixins.NoSuchPathError: If no path can be built.
"""
# It is OK that member ``self._meta`` is not found. Subclasses are
# required to set that attribute if they wish to use this method.
#
# Beware of leading and trailing slashes:
#
# urljoin('example.com', 'foo') => 'foo'
# urljoin('example.com/', 'foo') => 'example.com/foo'
# urljoin('example.com', '/foo') => '/foo'
# urljoin('example.com/', '/foo') => '/foo'
#
base = urljoin(
self._server_config.url + '/',
self._meta['api_path'] # pylint:disable=no-member
)
if which == 'base' or (which is None and not hasattr(self, 'id')):
return base
elif (which == 'self' or which is None) and hasattr(self, 'id'):
return urljoin(base + '/', str(self.id)) # pylint:disable=E1101
raise NoSuchPathError | [
"def",
"path",
"(",
"self",
",",
"which",
"=",
"None",
")",
":",
"# It is OK that member ``self._meta`` is not found. Subclasses are",
"# required to set that attribute if they wish to use this method.",
"#",
"# Beware of leading and trailing slashes:",
"#",
"# urljoin('example.com... | Return the path to the current entity.
Return the path to base entities of this entity's type if:
* ``which`` is ``'base'``, or
* ``which`` is ``None`` and instance attribute ``id`` is unset.
Return the path to this exact entity if instance attribute ``id`` is
set and:
* ``which`` is ``'self'``, or
* ``which`` is ``None``.
Raise :class:`NoSuchPathError` otherwise.
Child classes may choose to extend this method, especially if a child
entity offers more than the two URLs supported by default. If extended,
then the extending class should check for custom parameters before
calling ``super``::
def path(self, which):
if which == 'custom':
return urljoin(…)
super(ChildEntity, self).__init__(which)
This will allow the extending method to accept a custom parameter
without accidentally raising a :class:`NoSuchPathError`.
:param which: A string. Optional. Valid arguments are 'self' and
'base'.
:return: A string. A fully qualified URL.
:raises nailgun.entity_mixins.NoSuchPathError: If no path can be built. | [
"Return",
"the",
"path",
"to",
"the",
"current",
"entity",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L437-L490 | train | 208,620 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | Entity.get_values | def get_values(self):
"""Return a copy of field values on the current object.
This method is almost identical to ``vars(self).copy()``. However,
only instance attributes that correspond to a field are included in
the returned dict.
:return: A dict mapping field names to user-provided values.
"""
attrs = vars(self).copy()
attrs.pop('_server_config')
attrs.pop('_fields')
attrs.pop('_meta')
if '_path_fields' in attrs:
attrs.pop('_path_fields')
return attrs | python | def get_values(self):
"""Return a copy of field values on the current object.
This method is almost identical to ``vars(self).copy()``. However,
only instance attributes that correspond to a field are included in
the returned dict.
:return: A dict mapping field names to user-provided values.
"""
attrs = vars(self).copy()
attrs.pop('_server_config')
attrs.pop('_fields')
attrs.pop('_meta')
if '_path_fields' in attrs:
attrs.pop('_path_fields')
return attrs | [
"def",
"get_values",
"(",
"self",
")",
":",
"attrs",
"=",
"vars",
"(",
"self",
")",
".",
"copy",
"(",
")",
"attrs",
".",
"pop",
"(",
"'_server_config'",
")",
"attrs",
".",
"pop",
"(",
"'_fields'",
")",
"attrs",
".",
"pop",
"(",
"'_meta'",
")",
"if"... | Return a copy of field values on the current object.
This method is almost identical to ``vars(self).copy()``. However,
only instance attributes that correspond to a field are included in
the returned dict.
:return: A dict mapping field names to user-provided values. | [
"Return",
"a",
"copy",
"of",
"field",
"values",
"on",
"the",
"current",
"object",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L501-L516 | train | 208,621 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | Entity.to_json_dict | def to_json_dict(self, filter_fcn=None):
"""Create a dict with Entity properties for json encoding.
It can be overridden by subclasses for each standard serialization
doesn't work. By default it call _to_json_dict on OneToOne fields
and build a list calling the same method on each OneToMany object's
fields.
Fields can be filtered accordingly to 'filter_fcn'. This callable
receives field's name as first parameter and fields itself as second
parameter. It must return True if field's value should be included on
dict and False otherwise. If not provided field will not be filtered.
:type filter_fcn: callable
:return: dct
"""
fields, values = self.get_fields(), self.get_values()
filtered_fields = fields.items()
if filter_fcn is not None:
filtered_fields = (
tpl for tpl in filtered_fields if filter_fcn(tpl[0], tpl[1])
)
json_dct = {}
for field_name, field in filtered_fields:
if field_name in values:
value = values[field_name]
if value is None:
json_dct[field_name] = None
# This conditions is needed because some times you get
# None on an OneToOneField what lead to an error
# on bellow condition, e.g., calling value.to_json_dict()
# when value is None
elif isinstance(field, OneToOneField):
json_dct[field_name] = value.to_json_dict()
elif isinstance(field, OneToManyField):
json_dct[field_name] = [
entity.to_json_dict() for entity in value
]
else:
json_dct[field_name] = to_json_serializable(value)
return json_dct | python | def to_json_dict(self, filter_fcn=None):
"""Create a dict with Entity properties for json encoding.
It can be overridden by subclasses for each standard serialization
doesn't work. By default it call _to_json_dict on OneToOne fields
and build a list calling the same method on each OneToMany object's
fields.
Fields can be filtered accordingly to 'filter_fcn'. This callable
receives field's name as first parameter and fields itself as second
parameter. It must return True if field's value should be included on
dict and False otherwise. If not provided field will not be filtered.
:type filter_fcn: callable
:return: dct
"""
fields, values = self.get_fields(), self.get_values()
filtered_fields = fields.items()
if filter_fcn is not None:
filtered_fields = (
tpl for tpl in filtered_fields if filter_fcn(tpl[0], tpl[1])
)
json_dct = {}
for field_name, field in filtered_fields:
if field_name in values:
value = values[field_name]
if value is None:
json_dct[field_name] = None
# This conditions is needed because some times you get
# None on an OneToOneField what lead to an error
# on bellow condition, e.g., calling value.to_json_dict()
# when value is None
elif isinstance(field, OneToOneField):
json_dct[field_name] = value.to_json_dict()
elif isinstance(field, OneToManyField):
json_dct[field_name] = [
entity.to_json_dict() for entity in value
]
else:
json_dct[field_name] = to_json_serializable(value)
return json_dct | [
"def",
"to_json_dict",
"(",
"self",
",",
"filter_fcn",
"=",
"None",
")",
":",
"fields",
",",
"values",
"=",
"self",
".",
"get_fields",
"(",
")",
",",
"self",
".",
"get_values",
"(",
")",
"filtered_fields",
"=",
"fields",
".",
"items",
"(",
")",
"if",
... | Create a dict with Entity properties for json encoding.
It can be overridden by subclasses for each standard serialization
doesn't work. By default it call _to_json_dict on OneToOne fields
and build a list calling the same method on each OneToMany object's
fields.
Fields can be filtered accordingly to 'filter_fcn'. This callable
receives field's name as first parameter and fields itself as second
parameter. It must return True if field's value should be included on
dict and False otherwise. If not provided field will not be filtered.
:type filter_fcn: callable
:return: dct | [
"Create",
"a",
"dict",
"with",
"Entity",
"properties",
"for",
"json",
"encoding",
".",
"It",
"can",
"be",
"overridden",
"by",
"subclasses",
"for",
"each",
"standard",
"serialization",
"doesn",
"t",
"work",
".",
"By",
"default",
"it",
"call",
"_to_json_dict",
... | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L547-L586 | train | 208,622 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | Entity.compare | def compare(self, other, filter_fcn=None):
"""Returns True if properties can be compared in terms of eq.
Entity's Fields can be filtered accordingly to 'filter_fcn'.
This callable receives field's name as first parameter and field itself
as second parameter.
It must return True if field's value should be included on
comparison and False otherwise. If not provided field's marked as
unique will not be compared by default. 'id' and 'name' are examples of
unique fields commonly ignored. Check Entities fields for fields marked
with 'unique=True'
:param other: entity to compare
:param filter_fcn: callable
:return: boolean
"""
if not isinstance(other, type(self)):
return False
if filter_fcn is None:
def filter_unique(_, field):
"""Filter function for unique fields"""
return not field.unique
filter_fcn = filter_unique
return self.to_json_dict(filter_fcn) == other.to_json_dict(filter_fcn) | python | def compare(self, other, filter_fcn=None):
"""Returns True if properties can be compared in terms of eq.
Entity's Fields can be filtered accordingly to 'filter_fcn'.
This callable receives field's name as first parameter and field itself
as second parameter.
It must return True if field's value should be included on
comparison and False otherwise. If not provided field's marked as
unique will not be compared by default. 'id' and 'name' are examples of
unique fields commonly ignored. Check Entities fields for fields marked
with 'unique=True'
:param other: entity to compare
:param filter_fcn: callable
:return: boolean
"""
if not isinstance(other, type(self)):
return False
if filter_fcn is None:
def filter_unique(_, field):
"""Filter function for unique fields"""
return not field.unique
filter_fcn = filter_unique
return self.to_json_dict(filter_fcn) == other.to_json_dict(filter_fcn) | [
"def",
"compare",
"(",
"self",
",",
"other",
",",
"filter_fcn",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"type",
"(",
"self",
")",
")",
":",
"return",
"False",
"if",
"filter_fcn",
"is",
"None",
":",
"def",
"filter_unique",
... | Returns True if properties can be compared in terms of eq.
Entity's Fields can be filtered accordingly to 'filter_fcn'.
This callable receives field's name as first parameter and field itself
as second parameter.
It must return True if field's value should be included on
comparison and False otherwise. If not provided field's marked as
unique will not be compared by default. 'id' and 'name' are examples of
unique fields commonly ignored. Check Entities fields for fields marked
with 'unique=True'
:param other: entity to compare
:param filter_fcn: callable
:return: boolean | [
"Returns",
"True",
"if",
"properties",
"can",
"be",
"compared",
"in",
"terms",
"of",
"eq",
".",
"Entity",
"s",
"Fields",
"can",
"be",
"filtered",
"accordingly",
"to",
"filter_fcn",
".",
"This",
"callable",
"receives",
"field",
"s",
"name",
"as",
"first",
"... | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L599-L623 | train | 208,623 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | EntityCreateMixin.create_missing | def create_missing(self):
"""Automagically populate all required instance attributes.
Iterate through the set of all required class
:class:`nailgun.entity_fields.Field` defined on ``type(self)`` and
create a corresponding instance attribute if none exists. Subclasses
should override this method if there is some relationship between two
required fields.
:return: Nothing. This method relies on side-effects.
"""
for field_name, field in self.get_fields().items():
if field.required and not hasattr(self, field_name):
# Most `gen_value` methods return a value such as an integer,
# string or dictionary, but OneTo{One,Many}Field.gen_value
# returns the referenced class.
if hasattr(field, 'default'):
value = field.default
elif hasattr(field, 'choices'):
value = gen_choice(field.choices)
elif isinstance(field, OneToOneField):
value = field.gen_value()(self._server_config).create(True)
elif isinstance(field, OneToManyField):
value = [
field.gen_value()(self._server_config).create(True)
]
else:
value = field.gen_value()
setattr(self, field_name, value) | python | def create_missing(self):
"""Automagically populate all required instance attributes.
Iterate through the set of all required class
:class:`nailgun.entity_fields.Field` defined on ``type(self)`` and
create a corresponding instance attribute if none exists. Subclasses
should override this method if there is some relationship between two
required fields.
:return: Nothing. This method relies on side-effects.
"""
for field_name, field in self.get_fields().items():
if field.required and not hasattr(self, field_name):
# Most `gen_value` methods return a value such as an integer,
# string or dictionary, but OneTo{One,Many}Field.gen_value
# returns the referenced class.
if hasattr(field, 'default'):
value = field.default
elif hasattr(field, 'choices'):
value = gen_choice(field.choices)
elif isinstance(field, OneToOneField):
value = field.gen_value()(self._server_config).create(True)
elif isinstance(field, OneToManyField):
value = [
field.gen_value()(self._server_config).create(True)
]
else:
value = field.gen_value()
setattr(self, field_name, value) | [
"def",
"create_missing",
"(",
"self",
")",
":",
"for",
"field_name",
",",
"field",
"in",
"self",
".",
"get_fields",
"(",
")",
".",
"items",
"(",
")",
":",
"if",
"field",
".",
"required",
"and",
"not",
"hasattr",
"(",
"self",
",",
"field_name",
")",
"... | Automagically populate all required instance attributes.
Iterate through the set of all required class
:class:`nailgun.entity_fields.Field` defined on ``type(self)`` and
create a corresponding instance attribute if none exists. Subclasses
should override this method if there is some relationship between two
required fields.
:return: Nothing. This method relies on side-effects. | [
"Automagically",
"populate",
"all",
"required",
"instance",
"attributes",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L859-L888 | train | 208,624 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | EntityUpdateMixin.update_payload | def update_payload(self, fields=None):
"""Create a payload of values that can be sent to the server.
By default, this method behaves just like :func:`_payload`. However,
one can also specify a certain set of fields that should be returned.
For more information, see :meth:`update`.
"""
values = self.get_values()
if fields is not None:
values = {field: values[field] for field in fields}
return _payload(self.get_fields(), values) | python | def update_payload(self, fields=None):
"""Create a payload of values that can be sent to the server.
By default, this method behaves just like :func:`_payload`. However,
one can also specify a certain set of fields that should be returned.
For more information, see :meth:`update`.
"""
values = self.get_values()
if fields is not None:
values = {field: values[field] for field in fields}
return _payload(self.get_fields(), values) | [
"def",
"update_payload",
"(",
"self",
",",
"fields",
"=",
"None",
")",
":",
"values",
"=",
"self",
".",
"get_values",
"(",
")",
"if",
"fields",
"is",
"not",
"None",
":",
"values",
"=",
"{",
"field",
":",
"values",
"[",
"field",
"]",
"for",
"field",
... | Create a payload of values that can be sent to the server.
By default, this method behaves just like :func:`_payload`. However,
one can also specify a certain set of fields that should be returned.
For more information, see :meth:`update`. | [
"Create",
"a",
"payload",
"of",
"values",
"that",
"can",
"be",
"sent",
"to",
"the",
"server",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L992-L1003 | train | 208,625 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | EntitySearchMixin.search_payload | def search_payload(self, fields=None, query=None):
"""Create a search query.
Do the following:
1. Generate a search query. By default, all values returned by
:meth:`nailgun.entity_mixins.Entity.get_values` are used. If
``fields`` is specified, only the named values are used.
2. Merge ``query`` in to the generated search query.
3. Return the result.
The rules for generating a search query can be illustrated by example.
Let's say that we have an entity with an
:class:`nailgun.entity_fields.IntegerField`, a
:class:`nailgun.entity_fields.OneToOneField` and a
:class:`nailgun.entity_fields.OneToManyField`::
>>> some_entity = SomeEntity(id=1, one=2, many=[3, 4])
>>> fields = some_entity.get_fields()
>>> isinstance(fields['id'], IntegerField)
True
>>> isinstance(fields['one'], OneToOneField)
True
>>> isinstance(fields['many'], OneToManyField)
True
This method appends "_id" and "_ids" on to the names of each
``OneToOneField`` and ``OneToManyField``, respectively::
>>> some_entity.search_payload()
{'id': 1, 'one_id': 2, 'many_ids': [3, 4]}
By default, all fields are used. But you can specify a set of field
names to use::
>>> some_entity.search_payload({'id'})
{'id': 1}
>>> some_entity.search_payload({'one'})
{'one_id': 2}
>>> some_entity.search_payload({'id', 'one'})
{'id': 1, 'one_id': 2}
If a ``query`` is specified, it is merged in to the generated query::
>>> some_entity.search_payload(query={'id': 5})
{'id': 5, 'one_id': 2, 'many_ids': [3, 4]}
>>> some_entity.search_payload(query={'per_page': 1000})
{'id': 1, 'one_id': 2, 'many_ids': [3, 4], 'per_page': 1000}
.. WARNING:: This method currently generates an extremely naive search
query that will be wrong in many cases. In addition, Satellite
currently accepts invalid search queries without complaint. Make
sure to check the API documentation for your version of Satellite
against what this method produces.
:param fields: See :meth:`search`.
:param query: See :meth:`search`.
:returns: A dict that can be encoded as JSON and used in a search.
"""
if fields is None:
fields = set(self.get_values().keys())
if query is None:
query = {}
payload = {}
fields_dict = self.get_fields()
for field in fields:
value = getattr(self, field)
if isinstance(fields_dict[field], OneToOneField):
payload[field + '_id'] = value.id
elif isinstance(fields_dict[field], OneToManyField):
payload[field + '_ids'] = [entity.id for entity in value]
else:
payload[field] = value
payload.update(query)
return payload | python | def search_payload(self, fields=None, query=None):
"""Create a search query.
Do the following:
1. Generate a search query. By default, all values returned by
:meth:`nailgun.entity_mixins.Entity.get_values` are used. If
``fields`` is specified, only the named values are used.
2. Merge ``query`` in to the generated search query.
3. Return the result.
The rules for generating a search query can be illustrated by example.
Let's say that we have an entity with an
:class:`nailgun.entity_fields.IntegerField`, a
:class:`nailgun.entity_fields.OneToOneField` and a
:class:`nailgun.entity_fields.OneToManyField`::
>>> some_entity = SomeEntity(id=1, one=2, many=[3, 4])
>>> fields = some_entity.get_fields()
>>> isinstance(fields['id'], IntegerField)
True
>>> isinstance(fields['one'], OneToOneField)
True
>>> isinstance(fields['many'], OneToManyField)
True
This method appends "_id" and "_ids" on to the names of each
``OneToOneField`` and ``OneToManyField``, respectively::
>>> some_entity.search_payload()
{'id': 1, 'one_id': 2, 'many_ids': [3, 4]}
By default, all fields are used. But you can specify a set of field
names to use::
>>> some_entity.search_payload({'id'})
{'id': 1}
>>> some_entity.search_payload({'one'})
{'one_id': 2}
>>> some_entity.search_payload({'id', 'one'})
{'id': 1, 'one_id': 2}
If a ``query`` is specified, it is merged in to the generated query::
>>> some_entity.search_payload(query={'id': 5})
{'id': 5, 'one_id': 2, 'many_ids': [3, 4]}
>>> some_entity.search_payload(query={'per_page': 1000})
{'id': 1, 'one_id': 2, 'many_ids': [3, 4], 'per_page': 1000}
.. WARNING:: This method currently generates an extremely naive search
query that will be wrong in many cases. In addition, Satellite
currently accepts invalid search queries without complaint. Make
sure to check the API documentation for your version of Satellite
against what this method produces.
:param fields: See :meth:`search`.
:param query: See :meth:`search`.
:returns: A dict that can be encoded as JSON and used in a search.
"""
if fields is None:
fields = set(self.get_values().keys())
if query is None:
query = {}
payload = {}
fields_dict = self.get_fields()
for field in fields:
value = getattr(self, field)
if isinstance(fields_dict[field], OneToOneField):
payload[field + '_id'] = value.id
elif isinstance(fields_dict[field], OneToManyField):
payload[field + '_ids'] = [entity.id for entity in value]
else:
payload[field] = value
payload.update(query)
return payload | [
"def",
"search_payload",
"(",
"self",
",",
"fields",
"=",
"None",
",",
"query",
"=",
"None",
")",
":",
"if",
"fields",
"is",
"None",
":",
"fields",
"=",
"set",
"(",
"self",
".",
"get_values",
"(",
")",
".",
"keys",
"(",
")",
")",
"if",
"query",
"... | Create a search query.
Do the following:
1. Generate a search query. By default, all values returned by
:meth:`nailgun.entity_mixins.Entity.get_values` are used. If
``fields`` is specified, only the named values are used.
2. Merge ``query`` in to the generated search query.
3. Return the result.
The rules for generating a search query can be illustrated by example.
Let's say that we have an entity with an
:class:`nailgun.entity_fields.IntegerField`, a
:class:`nailgun.entity_fields.OneToOneField` and a
:class:`nailgun.entity_fields.OneToManyField`::
>>> some_entity = SomeEntity(id=1, one=2, many=[3, 4])
>>> fields = some_entity.get_fields()
>>> isinstance(fields['id'], IntegerField)
True
>>> isinstance(fields['one'], OneToOneField)
True
>>> isinstance(fields['many'], OneToManyField)
True
This method appends "_id" and "_ids" on to the names of each
``OneToOneField`` and ``OneToManyField``, respectively::
>>> some_entity.search_payload()
{'id': 1, 'one_id': 2, 'many_ids': [3, 4]}
By default, all fields are used. But you can specify a set of field
names to use::
>>> some_entity.search_payload({'id'})
{'id': 1}
>>> some_entity.search_payload({'one'})
{'one_id': 2}
>>> some_entity.search_payload({'id', 'one'})
{'id': 1, 'one_id': 2}
If a ``query`` is specified, it is merged in to the generated query::
>>> some_entity.search_payload(query={'id': 5})
{'id': 5, 'one_id': 2, 'many_ids': [3, 4]}
>>> some_entity.search_payload(query={'per_page': 1000})
{'id': 1, 'one_id': 2, 'many_ids': [3, 4], 'per_page': 1000}
.. WARNING:: This method currently generates an extremely naive search
query that will be wrong in many cases. In addition, Satellite
currently accepts invalid search queries without complaint. Make
sure to check the API documentation for your version of Satellite
against what this method produces.
:param fields: See :meth:`search`.
:param query: See :meth:`search`.
:returns: A dict that can be encoded as JSON and used in a search. | [
"Create",
"a",
"search",
"query",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L1097-L1173 | train | 208,626 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | EntitySearchMixin.search_normalize | def search_normalize(self, results):
"""Normalize search results so they can be used to create new entities.
See :meth:`search` for an example of how to use this method. Here's a
simplified example::
results = self.search_json()
results = self.search_normalize(results)
entity = SomeEntity(some_cfg, **results[0])
At this time, it is possible to parse all search results without
knowing what search query was sent to the server. However, it is
possible that certain responses can only be parsed if the search query
is known. If that is the case, this method will be given a new
``payload`` argument, where ``payload`` is the query sent to the
server.
As a precaution, the following is higly recommended:
* :meth:`search` may alter ``fields`` and ``query`` at will.
* :meth:`search_payload` may alter ``fields`` and ``query`` in an
idempotent manner.
* No other method should alter ``fields`` or ``query``.
:param results: A list of dicts, where each dict is a set of attributes
for one entity. The contents of these dicts are as is returned from
the server.
:returns: A list of dicts, where each dict is a set of attributes for
one entity. The contents of these dicts have been normalized and
can be used to instantiate entities.
"""
fields = self.get_fields()
normalized = []
for result in results:
# For each field that we know about, copy the corresponding field
# from the server's search result. If any extra attributes are
# copied over, Entity.__init__ will raise a NoSuchFieldError.
# Examples of problematic results from server:
#
# * organization_id (denormalized OneToOne. see above)
# * organizations, organization_ids (denormalized OneToMany. above)
# * updated_at, created_at (these may be handled in the future)
# * sp_subnet (Host.sp_subnet is an undocumented field)
#
attrs = {}
for field_name, field in fields.items():
if isinstance(field, OneToOneField):
try:
attrs[field_name] = _get_entity_id(field_name, result)
except MissingValueError:
pass
elif isinstance(field, OneToManyField):
try:
attrs[field_name] = _get_entity_ids(field_name, result)
except MissingValueError:
pass
else:
try:
attrs[field_name] = result[field_name]
except KeyError:
pass
normalized.append(attrs)
return normalized | python | def search_normalize(self, results):
"""Normalize search results so they can be used to create new entities.
See :meth:`search` for an example of how to use this method. Here's a
simplified example::
results = self.search_json()
results = self.search_normalize(results)
entity = SomeEntity(some_cfg, **results[0])
At this time, it is possible to parse all search results without
knowing what search query was sent to the server. However, it is
possible that certain responses can only be parsed if the search query
is known. If that is the case, this method will be given a new
``payload`` argument, where ``payload`` is the query sent to the
server.
As a precaution, the following is higly recommended:
* :meth:`search` may alter ``fields`` and ``query`` at will.
* :meth:`search_payload` may alter ``fields`` and ``query`` in an
idempotent manner.
* No other method should alter ``fields`` or ``query``.
:param results: A list of dicts, where each dict is a set of attributes
for one entity. The contents of these dicts are as is returned from
the server.
:returns: A list of dicts, where each dict is a set of attributes for
one entity. The contents of these dicts have been normalized and
can be used to instantiate entities.
"""
fields = self.get_fields()
normalized = []
for result in results:
# For each field that we know about, copy the corresponding field
# from the server's search result. If any extra attributes are
# copied over, Entity.__init__ will raise a NoSuchFieldError.
# Examples of problematic results from server:
#
# * organization_id (denormalized OneToOne. see above)
# * organizations, organization_ids (denormalized OneToMany. above)
# * updated_at, created_at (these may be handled in the future)
# * sp_subnet (Host.sp_subnet is an undocumented field)
#
attrs = {}
for field_name, field in fields.items():
if isinstance(field, OneToOneField):
try:
attrs[field_name] = _get_entity_id(field_name, result)
except MissingValueError:
pass
elif isinstance(field, OneToManyField):
try:
attrs[field_name] = _get_entity_ids(field_name, result)
except MissingValueError:
pass
else:
try:
attrs[field_name] = result[field_name]
except KeyError:
pass
normalized.append(attrs)
return normalized | [
"def",
"search_normalize",
"(",
"self",
",",
"results",
")",
":",
"fields",
"=",
"self",
".",
"get_fields",
"(",
")",
"normalized",
"=",
"[",
"]",
"for",
"result",
"in",
"results",
":",
"# For each field that we know about, copy the corresponding field",
"# from the... | Normalize search results so they can be used to create new entities.
See :meth:`search` for an example of how to use this method. Here's a
simplified example::
results = self.search_json()
results = self.search_normalize(results)
entity = SomeEntity(some_cfg, **results[0])
At this time, it is possible to parse all search results without
knowing what search query was sent to the server. However, it is
possible that certain responses can only be parsed if the search query
is known. If that is the case, this method will be given a new
``payload`` argument, where ``payload`` is the query sent to the
server.
As a precaution, the following is higly recommended:
* :meth:`search` may alter ``fields`` and ``query`` at will.
* :meth:`search_payload` may alter ``fields`` and ``query`` in an
idempotent manner.
* No other method should alter ``fields`` or ``query``.
:param results: A list of dicts, where each dict is a set of attributes
for one entity. The contents of these dicts are as is returned from
the server.
:returns: A list of dicts, where each dict is a set of attributes for
one entity. The contents of these dicts have been normalized and
can be used to instantiate entities. | [
"Normalize",
"search",
"results",
"so",
"they",
"can",
"be",
"used",
"to",
"create",
"new",
"entities",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L1219-L1282 | train | 208,627 |
SatelliteQE/nailgun | nailgun/entity_mixins.py | EntitySearchMixin.search_filter | def search_filter(entities, filters):
"""Read all ``entities`` and locally filter them.
This method can be used like so::
entities = EntitySearchMixin(entities, {'name': 'foo'})
In this example, only entities where ``entity.name == 'foo'`` holds
true are returned. An arbitrary number of field names and values may be
provided as filters.
.. NOTE:: This method calls :meth:`EntityReadMixin.read`. As a result,
this method only works when called on a class that also inherits
from :class:`EntityReadMixin`.
:param entities: A list of :class:`Entity` objects. All list items
should be of the same type.
:param filters: A dict in the form ``{field_name: field_value, …}``.
:raises nailgun.entity_mixins.NoSuchFieldError: If any of the fields
named in ``filters`` do not exist on the entities being filtered.
:raises: ``NotImplementedError`` If any of the fields named in
``filters`` are a :class:`nailgun.entity_fields.OneToOneField` or
:class:`nailgun.entity_fields.OneToManyField`.
"""
# Check to make sure all arguments are sane.
if len(entities) == 0:
return entities
fields = entities[0].get_fields() # assume all entities are identical
if not set(filters).issubset(fields):
raise NoSuchFieldError(
'Valid filters are {0}, but received {1} instead.'
.format(fields.keys(), filters.keys())
)
for field_name in filters:
if isinstance(fields[field_name], (OneToOneField, OneToManyField)):
raise NotImplementedError(
'Search results cannot (yet?) be locally filtered by '
'`OneToOneField`s and `OneToManyField`s. {0} is a {1}.'
.format(field_name, type(fields[field_name]).__name__)
)
# The arguments are sane. Filter away!
filtered = [entity.read() for entity in entities] # don't alter inputs
for field_name, field_value in filters.items():
filtered = [
entity for entity in filtered
if getattr(entity, field_name) == field_value
]
return filtered | python | def search_filter(entities, filters):
"""Read all ``entities`` and locally filter them.
This method can be used like so::
entities = EntitySearchMixin(entities, {'name': 'foo'})
In this example, only entities where ``entity.name == 'foo'`` holds
true are returned. An arbitrary number of field names and values may be
provided as filters.
.. NOTE:: This method calls :meth:`EntityReadMixin.read`. As a result,
this method only works when called on a class that also inherits
from :class:`EntityReadMixin`.
:param entities: A list of :class:`Entity` objects. All list items
should be of the same type.
:param filters: A dict in the form ``{field_name: field_value, …}``.
:raises nailgun.entity_mixins.NoSuchFieldError: If any of the fields
named in ``filters`` do not exist on the entities being filtered.
:raises: ``NotImplementedError`` If any of the fields named in
``filters`` are a :class:`nailgun.entity_fields.OneToOneField` or
:class:`nailgun.entity_fields.OneToManyField`.
"""
# Check to make sure all arguments are sane.
if len(entities) == 0:
return entities
fields = entities[0].get_fields() # assume all entities are identical
if not set(filters).issubset(fields):
raise NoSuchFieldError(
'Valid filters are {0}, but received {1} instead.'
.format(fields.keys(), filters.keys())
)
for field_name in filters:
if isinstance(fields[field_name], (OneToOneField, OneToManyField)):
raise NotImplementedError(
'Search results cannot (yet?) be locally filtered by '
'`OneToOneField`s and `OneToManyField`s. {0} is a {1}.'
.format(field_name, type(fields[field_name]).__name__)
)
# The arguments are sane. Filter away!
filtered = [entity.read() for entity in entities] # don't alter inputs
for field_name, field_value in filters.items():
filtered = [
entity for entity in filtered
if getattr(entity, field_name) == field_value
]
return filtered | [
"def",
"search_filter",
"(",
"entities",
",",
"filters",
")",
":",
"# Check to make sure all arguments are sane.",
"if",
"len",
"(",
"entities",
")",
"==",
"0",
":",
"return",
"entities",
"fields",
"=",
"entities",
"[",
"0",
"]",
".",
"get_fields",
"(",
")",
... | Read all ``entities`` and locally filter them.
This method can be used like so::
entities = EntitySearchMixin(entities, {'name': 'foo'})
In this example, only entities where ``entity.name == 'foo'`` holds
true are returned. An arbitrary number of field names and values may be
provided as filters.
.. NOTE:: This method calls :meth:`EntityReadMixin.read`. As a result,
this method only works when called on a class that also inherits
from :class:`EntityReadMixin`.
:param entities: A list of :class:`Entity` objects. All list items
should be of the same type.
:param filters: A dict in the form ``{field_name: field_value, …}``.
:raises nailgun.entity_mixins.NoSuchFieldError: If any of the fields
named in ``filters`` do not exist on the entities being filtered.
:raises: ``NotImplementedError`` If any of the fields named in
``filters`` are a :class:`nailgun.entity_fields.OneToOneField` or
:class:`nailgun.entity_fields.OneToManyField`. | [
"Read",
"all",
"entities",
"and",
"locally",
"filter",
"them",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L1375-L1424 | train | 208,628 |
SatelliteQE/nailgun | nailgun/config.py | _get_config_file_path | def _get_config_file_path(xdg_config_dir, xdg_config_file):
"""Search ``XDG_CONFIG_DIRS`` for a config file and return the first found.
Search each of the standard XDG configuration directories for a
configuration file. Return as soon as a configuration file is found. Beware
that by the time client code attempts to open the file, it may be gone or
otherwise inaccessible.
:param xdg_config_dir: A string. The name of the directory that is suffixed
to the end of each of the ``XDG_CONFIG_DIRS`` paths.
:param xdg_config_file: A string. The name of the configuration file that
is being searched for.
:returns: A ``str`` path to a configuration file.
:raises nailgun.config.ConfigFileError: When no configuration file can be
found.
"""
for config_dir in BaseDirectory.load_config_paths(xdg_config_dir):
path = join(config_dir, xdg_config_file)
if isfile(path):
return path
raise ConfigFileError(
'No configuration files could be located after searching for a file '
'named "{0}" in the standard XDG configuration paths, such as '
'"~/.config/{1}/".'.format(xdg_config_file, xdg_config_dir)
) | python | def _get_config_file_path(xdg_config_dir, xdg_config_file):
"""Search ``XDG_CONFIG_DIRS`` for a config file and return the first found.
Search each of the standard XDG configuration directories for a
configuration file. Return as soon as a configuration file is found. Beware
that by the time client code attempts to open the file, it may be gone or
otherwise inaccessible.
:param xdg_config_dir: A string. The name of the directory that is suffixed
to the end of each of the ``XDG_CONFIG_DIRS`` paths.
:param xdg_config_file: A string. The name of the configuration file that
is being searched for.
:returns: A ``str`` path to a configuration file.
:raises nailgun.config.ConfigFileError: When no configuration file can be
found.
"""
for config_dir in BaseDirectory.load_config_paths(xdg_config_dir):
path = join(config_dir, xdg_config_file)
if isfile(path):
return path
raise ConfigFileError(
'No configuration files could be located after searching for a file '
'named "{0}" in the standard XDG configuration paths, such as '
'"~/.config/{1}/".'.format(xdg_config_file, xdg_config_dir)
) | [
"def",
"_get_config_file_path",
"(",
"xdg_config_dir",
",",
"xdg_config_file",
")",
":",
"for",
"config_dir",
"in",
"BaseDirectory",
".",
"load_config_paths",
"(",
"xdg_config_dir",
")",
":",
"path",
"=",
"join",
"(",
"config_dir",
",",
"xdg_config_file",
")",
"if... | Search ``XDG_CONFIG_DIRS`` for a config file and return the first found.
Search each of the standard XDG configuration directories for a
configuration file. Return as soon as a configuration file is found. Beware
that by the time client code attempts to open the file, it may be gone or
otherwise inaccessible.
:param xdg_config_dir: A string. The name of the directory that is suffixed
to the end of each of the ``XDG_CONFIG_DIRS`` paths.
:param xdg_config_file: A string. The name of the configuration file that
is being searched for.
:returns: A ``str`` path to a configuration file.
:raises nailgun.config.ConfigFileError: When no configuration file can be
found. | [
"Search",
"XDG_CONFIG_DIRS",
"for",
"a",
"config",
"file",
"and",
"return",
"the",
"first",
"found",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/config.py#L27-L52 | train | 208,629 |
SatelliteQE/nailgun | nailgun/config.py | BaseServerConfig.delete | def delete(cls, label='default', path=None):
"""Delete a server configuration.
This method is thread safe.
:param label: A string. The configuration identified by ``label`` is
deleted.
:param path: A string. The configuration file to be manipulated.
Defaults to what is returned by
:func:`nailgun.config._get_config_file_path`.
:returns: ``None``
"""
if path is None:
path = _get_config_file_path(
cls._xdg_config_dir,
cls._xdg_config_file
)
cls._file_lock.acquire()
try:
with open(path) as config_file:
config = json.load(config_file)
del config[label]
with open(path, 'w') as config_file:
json.dump(config, config_file)
finally:
cls._file_lock.release() | python | def delete(cls, label='default', path=None):
"""Delete a server configuration.
This method is thread safe.
:param label: A string. The configuration identified by ``label`` is
deleted.
:param path: A string. The configuration file to be manipulated.
Defaults to what is returned by
:func:`nailgun.config._get_config_file_path`.
:returns: ``None``
"""
if path is None:
path = _get_config_file_path(
cls._xdg_config_dir,
cls._xdg_config_file
)
cls._file_lock.acquire()
try:
with open(path) as config_file:
config = json.load(config_file)
del config[label]
with open(path, 'w') as config_file:
json.dump(config, config_file)
finally:
cls._file_lock.release() | [
"def",
"delete",
"(",
"cls",
",",
"label",
"=",
"'default'",
",",
"path",
"=",
"None",
")",
":",
"if",
"path",
"is",
"None",
":",
"path",
"=",
"_get_config_file_path",
"(",
"cls",
".",
"_xdg_config_dir",
",",
"cls",
".",
"_xdg_config_file",
")",
"cls",
... | Delete a server configuration.
This method is thread safe.
:param label: A string. The configuration identified by ``label`` is
deleted.
:param path: A string. The configuration file to be manipulated.
Defaults to what is returned by
:func:`nailgun.config._get_config_file_path`.
:returns: ``None`` | [
"Delete",
"a",
"server",
"configuration",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/config.py#L125-L151 | train | 208,630 |
def get_labels(cls, path=None):
    """Return every server configuration label found in the config file.

    :param path: A string. The configuration file to be read. Defaults to
        what is returned by :func:`nailgun.config._get_config_file_path`.
    :returns: A tuple of strings, one per stored configuration label.
    """
    target = path
    if target is None:
        target = _get_config_file_path(
            cls._xdg_config_dir,
            cls._xdg_config_file
        )
    with open(target) as handle:
        stored = json.load(handle)
    # Wrap in a tuple: ``keys()`` is a list on Python 2 and a view on
    # Python 3, and callers should get a stable, immutable sequence.
    return tuple(stored)
"def",
"get_labels",
"(",
"cls",
",",
"path",
"=",
"None",
")",
":",
"if",
"path",
"is",
"None",
":",
"path",
"=",
"_get_config_file_path",
"(",
"cls",
".",
"_xdg_config_dir",
",",
"cls",
".",
"_xdg_config_file",
")",
"with",
"open",
"(",
"path",
")",
... | Get all server configuration labels.
:param path: A string. The configuration file to be manipulated.
Defaults to what is returned by
:func:`nailgun.config._get_config_file_path`.
:returns: Server configuration labels, where each label is a string. | [
"Get",
"all",
"server",
"configuration",
"labels",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/config.py#L176-L192 | train | 208,631 |
def save(self, label='default', path=None):
    """Save the current connection configuration to a file.

    This method is thread safe.

    :param label: A string. An identifier for the current configuration.
        This allows multiple configurations with unique labels to be saved
        in a single file. If a configuration identified by ``label``
        already exists in the destination configuration file, it is
        replaced.
    :param path: A string. The configuration file to be manipulated. By
        default, an XDG-compliant configuration file is used. A
        configuration file is created if one does not exist already.
    :returns: ``None``
    """
    # What will we write out? Copy the instance's __dict__: ``vars(self)``
    # returns the live dict, and stringifying ``version`` below would
    # otherwise mutate the instance as a side effect of saving it.
    cfg = vars(self).copy()
    if 'version' in cfg:  # pragma: no cover
        cfg['version'] = str(cfg['version'])
    # Where is the file we're writing to?
    if path is None:
        path = join(
            BaseDirectory.save_config_path(self._xdg_config_dir),
            self._xdg_config_file
        )
    self._file_lock.acquire()
    try:
        # Either read an existing config or make an empty one. Then update
        # the config and write it out.
        try:
            with open(path) as config_file:
                config = json.load(config_file)
        except IOError:  # pragma: no cover
            config = {}
        config[label] = cfg
        with open(path, 'w') as config_file:
            json.dump(config, config_file)
    finally:
        self._file_lock.release()
"def",
"save",
"(",
"self",
",",
"label",
"=",
"'default'",
",",
"path",
"=",
"None",
")",
":",
"# What will we write out?",
"cfg",
"=",
"vars",
"(",
"self",
")",
"if",
"'version'",
"in",
"cfg",
":",
"# pragma: no cover",
"cfg",
"[",
"'version'",
"]",
"=... | Save the current connection configuration to a file.
This method is thread safe.
:param label: A string. An identifier for the current configuration.
This allows multiple configurations with unique labels to be saved
in a single file. If a configuration identified by ``label``
already exists in the destination configuration file, it is
replaced.
:param path: A string. The configuration file to be manipulated. By
default, an XDG-compliant configuration file is used. A
configuration file is created if one does not exist already.
:returns: ``None`` | [
"Save",
"the",
"current",
"connection",
"configuration",
"to",
"a",
"file",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/config.py#L194-L235 | train | 208,632 |
def _log_request(method, url, kwargs, data=None, params=None):
    """Log out information about the arguments given.

    The arguments provided to this function correspond to the arguments that
    one can pass to ``requests.request``.

    :return: Nothing is returned.
    """
    # Describe each optional piece of the request up front, falling back
    # to a "no ..." placeholder when it is absent.
    if len(kwargs) > 0:
        options_desc = 'options {0}'.format(kwargs)
    else:
        options_desc = 'no options'
    params_desc = 'params {0}'.format(params) if params else 'no params'
    if data is None:
        data_desc = 'no data'
    else:
        data_desc = 'data {0}'.format(data)
    # Lazy %-style arguments keep formatting cheap when DEBUG is disabled.
    logger.debug(
        'Making HTTP %s request to %s with %s, %s and %s.',
        method,
        url,
        options_desc,
        params_desc,
        data_desc,
    )
"def",
"_log_request",
"(",
"method",
",",
"url",
",",
"kwargs",
",",
"data",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"logger",
".",
"debug",
"(",
"'Making HTTP %s request to %s with %s, %s and %s.'",
",",
"method",
",",
"url",
",",
"'options {0}'",... | Log out information about the arguments given.
The arguments provided to this function correspond to the arguments that
one can pass to ``requests.request``.
:return: Nothing is returned. | [
"Log",
"out",
"information",
"about",
"the",
"arguments",
"given",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/client.py#L88-L104 | train | 208,633 |
def _log_response(response):
    """Log out information about a ``Request`` object.

    After calling ``requests.request`` or one of its convenience methods, the
    object returned can be passed to this method. If done, information about
    the object returned is logged.

    :return: Nothing is returned.
    """
    message = u'Received HTTP {0} response: {1}'.format(
        response.status_code,
        response.text
    )
    # 4XX/5XX responses are surfaced at a louder log level.
    emit = logger.debug
    if response.status_code >= 400:  # pragma: no cover
        emit = logger.warning
    emit(message)
"def",
"_log_response",
"(",
"response",
")",
":",
"message",
"=",
"u'Received HTTP {0} response: {1}'",
".",
"format",
"(",
"response",
".",
"status_code",
",",
"response",
".",
"text",
")",
"if",
"response",
".",
"status_code",
">=",
"400",
":",
"# pragma: no ... | Log out information about a ``Request`` object.
After calling ``requests.request`` or one of its convenience methods, the
object returned can be passed to this method. If done, information about
the object returned is logged.
:return: Nothing is returned. | [
"Log",
"out",
"information",
"about",
"a",
"Request",
"object",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/client.py#L107-L124 | train | 208,634 |
def request(method, url, **kwargs):
    """A wrapper for ``requests.request``."""
    _set_content_type(kwargs)
    # When the payload is JSON, serialize it ourselves before handing it
    # to requests.
    payload = kwargs.get('data')
    if _content_type_is_json(kwargs) and payload is not None:
        kwargs['data'] = dumps(payload)
    _log_request(method, url, kwargs)
    result = requests.request(method, url, **kwargs)
    _log_response(result)
    return result
"def",
"request",
"(",
"method",
",",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"_set_content_type",
"(",
"kwargs",
")",
"if",
"_content_type_is_json",
"(",
"kwargs",
")",
"and",
"kwargs",
".",
"get",
"(",
"'data'",
")",
"is",
"not",
"None",
":",
"kwar... | A wrapper for ``requests.request``. | [
"A",
"wrapper",
"for",
"requests",
".",
"request",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/client.py#L127-L135 | train | 208,635 |
def head(url, **kwargs):
    """A wrapper for ``requests.head``."""
    _set_content_type(kwargs)
    # When the payload is JSON, serialize it ourselves before handing it
    # to requests.
    payload = kwargs.get('data')
    if _content_type_is_json(kwargs) and payload is not None:
        kwargs['data'] = dumps(payload)
    _log_request('HEAD', url, kwargs)
    result = requests.head(url, **kwargs)
    _log_response(result)
    return result
"def",
"head",
"(",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"_set_content_type",
"(",
"kwargs",
")",
"if",
"_content_type_is_json",
"(",
"kwargs",
")",
"and",
"kwargs",
".",
"get",
"(",
"'data'",
")",
"is",
"not",
"None",
":",
"kwargs",
"[",
"'data'"... | A wrapper for ``requests.head``. | [
"A",
"wrapper",
"for",
"requests",
".",
"head",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/client.py#L138-L146 | train | 208,636 |
def post(url, data=None, json=None, **kwargs):
    """A wrapper for ``requests.post``."""
    _set_content_type(kwargs)
    # JSON payloads supplied via ``data`` are serialized before sending.
    if _content_type_is_json(kwargs) and data is not None:
        data = dumps(data)
    _log_request('POST', url, kwargs, data)
    result = requests.post(url, data, json, **kwargs)
    _log_response(result)
    return result
"def",
"post",
"(",
"url",
",",
"data",
"=",
"None",
",",
"json",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"_set_content_type",
"(",
"kwargs",
")",
"if",
"_content_type_is_json",
"(",
"kwargs",
")",
"and",
"data",
"is",
"not",
"None",
":",
"da... | A wrapper for ``requests.post``. | [
"A",
"wrapper",
"for",
"requests",
".",
"post",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/client.py#L160-L168 | train | 208,637 |
def put(url, data=None, **kwargs):
    """A wrapper for ``requests.put``. Sends a PUT request."""
    _set_content_type(kwargs)
    # JSON payloads supplied via ``data`` are serialized before sending.
    if _content_type_is_json(kwargs) and data is not None:
        data = dumps(data)
    _log_request('PUT', url, kwargs, data)
    result = requests.put(url, data, **kwargs)
    _log_response(result)
    return result
"def",
"put",
"(",
"url",
",",
"data",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"_set_content_type",
"(",
"kwargs",
")",
"if",
"_content_type_is_json",
"(",
"kwargs",
")",
"and",
"data",
"is",
"not",
"None",
":",
"data",
"=",
"dumps",
"(",
"da... | A wrapper for ``requests.put``. Sends a PUT request. | [
"A",
"wrapper",
"for",
"requests",
".",
"put",
".",
"Sends",
"a",
"PUT",
"request",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/client.py#L171-L179 | train | 208,638 |
def _handle_response(response, server_config, synchronous=False, timeout=None):
    """Handle a server's response in a typical fashion.

    Do the following:

    1. Check the server's response for an HTTP status code indicating an error.
    2. Poll the server for a foreman task to complete if an HTTP 202 (accepted)
       status code is returned and ``synchronous is True``.
    3. Immediately return if an HTTP "NO CONTENT" response is received.
    4. Determine what type of the content returned from server. Depending on
       the type method should return server's response, with all JSON decoded
       or just response content itself.

    :param response: A response object as returned by one of the functions in
        :mod:`nailgun.client` or the requests library.
    :param server_config: A `nailgun.config.ServerConfig` object.
    :param synchronous: Should this function poll the server?
    :param timeout: Maximum number of seconds to wait until timing out.
        Defaults to ``nailgun.entity_mixins.TASK_TIMEOUT``.
    """
    response.raise_for_status()
    if synchronous is True and response.status_code == ACCEPTED:
        # The server spawned a task; block until it finishes.
        task = ForemanTask(server_config, id=response.json()['id'])
        return task.poll(timeout=timeout)
    if response.status_code == NO_CONTENT:
        return None
    # Decode JSON bodies; return text for byte payloads; pass anything
    # else through untouched.
    content_type = response.headers.get('content-type', '').lower()
    if 'application/json' in content_type:
        return response.json()
    if isinstance(response.content, bytes):
        return response.content.decode('utf-8')
    return response.content
"def",
"_handle_response",
"(",
"response",
",",
"server_config",
",",
"synchronous",
"=",
"False",
",",
"timeout",
"=",
"None",
")",
":",
"response",
".",
"raise_for_status",
"(",
")",
"if",
"synchronous",
"is",
"True",
"and",
"response",
".",
"status_code",
... | Handle a server's response in a typical fashion.
Do the following:
1. Check the server's response for an HTTP status code indicating an error.
2. Poll the server for a foreman task to complete if an HTTP 202 (accepted)
status code is returned and ``synchronous is True``.
3. Immediately return if an HTTP "NO CONTENT" response is received.
4. Determine what type of the content returned from server. Depending on
the type method should return server's response, with all JSON decoded
or just response content itself.
:param response: A response object as returned by one of the functions in
:mod:`nailgun.client` or the requests library.
:param server_config: A `nailgun.config.ServerConfig` object.
:param synchronous: Should this function poll the server?
:param timeout: Maximum number of seconds to wait until timing out.
Defaults to ``nailgun.entity_mixins.TASK_TIMEOUT``. | [
"Handle",
"a",
"server",
"s",
"response",
"in",
"a",
"typical",
"fashion",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L97-L129 | train | 208,639 |
def create_missing(self):
    """Possibly set several extra instance attributes.

    If ``onthefly_register`` is set and is true, generate a value for each
    of the following instance attributes that is not already set:

    * account_password
    * attr_firstname
    * attr_lastname
    * attr_login
    * attr_mail

    """
    # NOTE: the docstring previously listed ``account_firstname`` and
    # ``account_lastname``; the code has always populated
    # ``attr_firstname`` and ``attr_lastname``.
    super(AuthSourceLDAP, self).create_missing()
    if getattr(self, 'onthefly_register', False) is True:
        for field in (
                'account_password',
                'attr_firstname',
                'attr_lastname',
                'attr_login',
                'attr_mail'):
            if not hasattr(self, field):
                # Let the field definition generate a plausible value.
                setattr(self, field, self._fields[field].gen_value())
"def",
"create_missing",
"(",
"self",
")",
":",
"super",
"(",
"AuthSourceLDAP",
",",
"self",
")",
".",
"create_missing",
"(",
")",
"if",
"getattr",
"(",
"self",
",",
"'onthefly_register'",
",",
"False",
")",
"is",
"True",
":",
"for",
"field",
"in",
"(",
... | Possibly set several extra instance attributes.
If ``onthefly_register`` is set and is true, set the following instance
attributes:
* account_password
* account_firstname
* account_lastname
* attr_login
* attr_mail | [
"Possibly",
"set",
"several",
"extra",
"instance",
"attributes",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L597-L619 | train | 208,640 |
def read(self, entity=None, attrs=None, ignore=None, params=None):
    """Do not read the ``account_password`` attribute. Work around a bug.

    For more information, see `Bugzilla #1243036
    <https://bugzilla.redhat.com/show_bug.cgi?id=1243036>`_.
    """
    if attrs is None:
        # The server never echoes the password back; fetch the remaining
        # attributes via an update preview instead of a plain read.
        attrs = self.update_json([])
    skipped = set() if ignore is None else ignore
    skipped.add('account_password')
    return super(AuthSourceLDAP, self).read(entity, attrs, skipped, params)
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"attrs",
"is",
"None",
":",
"attrs",
"=",
"self",
".",
"update_json",
"(",
"[",
"]",
")",
... | Do not read the ``account_password`` attribute. Work around a bug.
For more information, see `Bugzilla #1243036
<https://bugzilla.redhat.com/show_bug.cgi?id=1243036>`_. | [
"Do",
"not",
"read",
"the",
"account_password",
"attribute",
".",
"Work",
"around",
"a",
"bug",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L621-L633 | train | 208,641 |
def read(self, entity=None, attrs=None, ignore=None, params=None):
    """Work around a bug. Rename ``search`` to ``search_``.

    For more information on the bug, see `Bugzilla #1257255
    <https://bugzilla.redhat.com/show_bug.cgi?id=1257255>`_.
    """
    if attrs is None:
        attrs = self.read_json()
    # The entity names this field ``search_`` to avoid a clash.
    attrs['search_'] = attrs.pop('search')
    if ignore is None:
        ignore = set()
    # Satellite doesn't return this attribute. See BZ 1257255.
    if 'max_count' not in ignore:
        # We cannot call `self.update_json([])`, as an ID might not be
        # present on self. However, `attrs` is guaranteed to have an ID.
        fetcher = DiscoveryRule(
            self._server_config,
            id=attrs['id'],
        )
        attrs['max_count'] = fetcher.update_json([])['max_count']
    return super(DiscoveryRule, self).read(entity, attrs, ignore, params)
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"attrs",
"is",
"None",
":",
"attrs",
"=",
"self",
".",
"read_json",
"(",
")",
"attrs",
"[",... | Work around a bug. Rename ``search`` to ``search_``.
For more information on the bug, see `Bugzilla #1257255
<https://bugzilla.redhat.com/show_bug.cgi?id=1257255>`_. | [
"Work",
"around",
"a",
"bug",
".",
"Rename",
"search",
"to",
"search_",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L1082-L1104 | train | 208,642 |
def read(self, entity=None, attrs=None, ignore=None, params=None):
    """Ignore usergroup from read and alter auth_source_ldap with auth_source
    """
    if entity is None:
        entity = type(self)(
            self._server_config,
            usergroup=self.usergroup,  # pylint:disable=no-member
        )
    skipped = set() if ignore is None else ignore
    skipped.add('usergroup')
    if attrs is None:
        attrs = self.read_json()
    # The server labels this field ``auth_source_ldap``; the entity
    # exposes it as ``auth_source``.
    attrs['auth_source'] = attrs.pop('auth_source_ldap')
    return super(ExternalUserGroup, self).read(entity, attrs, skipped, params)
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"entity",
"is",
"None",
":",
"entity",
"=",
"type",
"(",
"self",
")",
"(",
"self",
".",
"... | Ignore usergroup from read and alter auth_source_ldap with auth_source | [
"Ignore",
"usergroup",
"from",
"read",
"and",
"alter",
"auth_source_ldap",
"with",
"auth_source"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L1216-L1230 | train | 208,643 |
def run(self, synchronous=True, **kwargs):
    """Helper to run existing job template

    :param synchronous: What should happen if the server returns an HTTP
        202 (accepted) status code? Wait for the task to complete if
        ``True``. Immediately return the server's response otherwise.
    :param kwargs: Arguments to pass to requests.
        'data' supports next fields:
        required:
        job_template_id/feature,
        targeting_type,
        search_query/bookmark_id,
        inputs
        optional:
        description_format,
        concurrency_control
        scheduling,
        ssh,
        recurrence,
        execution_timeout_interval
    :returns: The server's response, with all JSON decoded.
    :raises: ``requests.exceptions.HTTPError`` If the server responds with
        an HTTP 4XX or 5XX message.
    """
    kwargs = kwargs.copy()  # shadow the passed-in kwargs
    kwargs.update(self._server_config.get_client_kwargs())
    if 'data' in kwargs:
        payload = kwargs['data']
        # Validate the mutually-required field pairs first, then the
        # individually-required fields, preserving the error order.
        if not ('job_template_id' in payload or 'feature' in payload):
            raise KeyError('Provide either job_template_id or feature value')
        if not ('search_query' in payload or 'bookmark_id' in payload):
            raise KeyError('Provide either search_query or bookmark_id value')
        for required in ('targeting_type', 'inputs'):
            if required not in payload:
                raise KeyError('Provide {} value'.format(required))
        kwargs['data'] = {u'job_invocation': payload}
    response = client.post(self.path('base'), **kwargs)
    response.raise_for_status()
    if synchronous is True:
        task_id = response.json()['task']['id']
        return ForemanTask(
            server_config=self._server_config, id=task_id).poll()
    return response.json()
"def",
"run",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",
"get_client_kw... | Helper to run existing job template
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
'data' supports next fields:
required:
job_template_id/feature,
targeting_type,
search_query/bookmark_id,
inputs
optional:
description_format,
concurrency_control
scheduling,
ssh,
recurrence,
execution_timeout_interval
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Helper",
"to",
"run",
"existing",
"job",
"template"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L1576-L1617 | train | 208,644 |
def read(self, entity=None, attrs=None, ignore=None, params=None):
    """Ignore the template inputs when initially reading the job template.
    Look up each TemplateInput entity separately
    and afterwords add them to the JobTemplate entity."""
    if attrs is None:
        attrs = self.read_json(params=params)
    if ignore is None:
        ignore = set()
    ignore.add('template_inputs')
    result = super(JobTemplate, self).read(
        entity=entity, attrs=attrs, ignore=ignore, params=params)
    # Re-attach each input, bound back to this template by id.
    inputs = []
    for input_id in _get_entity_ids('template_inputs', attrs):
        inputs.append(TemplateInput(
            result._server_config,
            id=input_id,
            template=JobTemplate(result._server_config, id=result.id),
        ))
    setattr(result, 'template_inputs', inputs)
    return result
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"attrs",
"is",
"None",
":",
"attrs",
"=",
"self",
".",
"read_json",
"(",
"params",
"=",
"pa... | Ignore the template inputs when initially reading the job template.
Look up each TemplateInput entity separately
and afterwords add them to the JobTemplate entity. | [
"Ignore",
"the",
"template",
"inputs",
"when",
"initially",
"reading",
"the",
"job",
"template",
".",
"Look",
"up",
"each",
"TemplateInput",
"entity",
"separately",
"and",
"afterwords",
"add",
"them",
"to",
"the",
"JobTemplate",
"entity",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L1667-L1686 | train | 208,645 |
SatelliteQE/nailgun | nailgun/entities.py | ContentUpload.upload | def upload(self, filepath, filename=None):
"""Upload content.
:param filepath: path to the file that should be chunked and uploaded
:param filename: name of the file on the server, defaults to the
last part of the ``filepath`` if not set
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
:raises nailgun.entities.APIResponseError: If the response has a status
other than "success".
.. _POST a Multipart-Encoded File:
http://docs.python-requests.org/en/latest/user/quickstart/#post-a-multipart-encoded-file
.. _POST Multiple Multipart-Encoded Files:
http://docs.python-requests.org/en/latest/user/advanced/#post-multiple-multipart-encoded-files
"""
if not filename:
filename = os.path.basename(filepath)
content_upload = self.create()
try:
offset = 0
content_chunk_size = 2 * 1024 * 1024
with open(filepath, 'rb') as contentfile:
chunk = contentfile.read(content_chunk_size)
while len(chunk) > 0:
data = {'offset': offset,
'content': chunk}
content_upload.update(data)
offset += len(chunk)
chunk = contentfile.read(content_chunk_size)
size = 0
checksum = hashlib.sha256()
with open(filepath, 'rb') as contentfile:
contents = contentfile.read()
size = len(contents)
checksum.update(contents)
uploads = [{'id': content_upload.upload_id, 'name': filename,
'size': size, 'checksum': checksum.hexdigest()}]
# pylint:disable=no-member
json = self.repository.import_uploads(uploads)
finally:
content_upload.delete()
return json | python | def upload(self, filepath, filename=None):
"""Upload content.
:param filepath: path to the file that should be chunked and uploaded
:param filename: name of the file on the server, defaults to the
last part of the ``filepath`` if not set
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
:raises nailgun.entities.APIResponseError: If the response has a status
other than "success".
.. _POST a Multipart-Encoded File:
http://docs.python-requests.org/en/latest/user/quickstart/#post-a-multipart-encoded-file
.. _POST Multiple Multipart-Encoded Files:
http://docs.python-requests.org/en/latest/user/advanced/#post-multiple-multipart-encoded-files
"""
if not filename:
filename = os.path.basename(filepath)
content_upload = self.create()
try:
offset = 0
content_chunk_size = 2 * 1024 * 1024
with open(filepath, 'rb') as contentfile:
chunk = contentfile.read(content_chunk_size)
while len(chunk) > 0:
data = {'offset': offset,
'content': chunk}
content_upload.update(data)
offset += len(chunk)
chunk = contentfile.read(content_chunk_size)
size = 0
checksum = hashlib.sha256()
with open(filepath, 'rb') as contentfile:
contents = contentfile.read()
size = len(contents)
checksum.update(contents)
uploads = [{'id': content_upload.upload_id, 'name': filename,
'size': size, 'checksum': checksum.hexdigest()}]
# pylint:disable=no-member
json = self.repository.import_uploads(uploads)
finally:
content_upload.delete()
return json | [
"def",
"upload",
"(",
"self",
",",
"filepath",
",",
"filename",
"=",
"None",
")",
":",
"if",
"not",
"filename",
":",
"filename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filepath",
")",
"content_upload",
"=",
"self",
".",
"create",
"(",
")",
"t... | Upload content.
:param filepath: path to the file that should be chunked and uploaded
:param filename: name of the file on the server, defaults to the
last part of the ``filepath`` if not set
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
:raises nailgun.entities.APIResponseError: If the response has a status
other than "success".
.. _POST a Multipart-Encoded File:
http://docs.python-requests.org/en/latest/user/quickstart/#post-a-multipart-encoded-file
.. _POST Multiple Multipart-Encoded Files:
http://docs.python-requests.org/en/latest/user/advanced/#post-multiple-multipart-encoded-files | [
"Upload",
"content",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L2104-L2155 | train | 208,646 |
SatelliteQE/nailgun | nailgun/entities.py | ContentViewFilterRule.read | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Do not read certain fields.
Do not expect the server to return the ``content_view_filter``
attribute. This has no practical impact, as the attribute must be
provided when a :class:`nailgun.entities.ContentViewFilterRule` is
instantiated.
Also, ignore any field that is not returned by the server. For more
information, see `Bugzilla #1238408
<https://bugzilla.redhat.com/show_bug.cgi?id=1238408>`_.
"""
if entity is None:
entity = type(self)(
self._server_config,
# pylint:disable=no-member
content_view_filter=self.content_view_filter,
)
if attrs is None:
attrs = self.read_json()
if ignore is None:
ignore = set()
ignore.add('content_view_filter')
ignore.update([
field_name
for field_name in entity.get_fields().keys()
if field_name not in attrs
])
return super(ContentViewFilterRule, self).read(
entity, attrs, ignore, params) | python | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Do not read certain fields.
Do not expect the server to return the ``content_view_filter``
attribute. This has no practical impact, as the attribute must be
provided when a :class:`nailgun.entities.ContentViewFilterRule` is
instantiated.
Also, ignore any field that is not returned by the server. For more
information, see `Bugzilla #1238408
<https://bugzilla.redhat.com/show_bug.cgi?id=1238408>`_.
"""
if entity is None:
entity = type(self)(
self._server_config,
# pylint:disable=no-member
content_view_filter=self.content_view_filter,
)
if attrs is None:
attrs = self.read_json()
if ignore is None:
ignore = set()
ignore.add('content_view_filter')
ignore.update([
field_name
for field_name in entity.get_fields().keys()
if field_name not in attrs
])
return super(ContentViewFilterRule, self).read(
entity, attrs, ignore, params) | [
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"entity",
"is",
"None",
":",
"entity",
"=",
"type",
"(",
"self",
")",
"(",
"self",
".",
"... | Do not read certain fields.
Do not expect the server to return the ``content_view_filter``
attribute. This has no practical impact, as the attribute must be
provided when a :class:`nailgun.entities.ContentViewFilterRule` is
instantiated.
Also, ignore any field that is not returned by the server. For more
information, see `Bugzilla #1238408
<https://bugzilla.redhat.com/show_bug.cgi?id=1238408>`_. | [
"Do",
"not",
"read",
"certain",
"fields",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L2287-L2317 | train | 208,647 |
SatelliteQE/nailgun | nailgun/entities.py | ContentView.publish | def publish(self, synchronous=True, **kwargs):
"""Helper for publishing an existing content view.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
if 'data' in kwargs and 'id' not in kwargs['data']:
kwargs['data']['id'] = self.id # pylint:disable=no-member
kwargs.update(self._server_config.get_client_kwargs())
response = client.post(self.path('publish'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | python | def publish(self, synchronous=True, **kwargs):
"""Helper for publishing an existing content view.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
if 'data' in kwargs and 'id' not in kwargs['data']:
kwargs['data']['id'] = self.id # pylint:disable=no-member
kwargs.update(self._server_config.get_client_kwargs())
response = client.post(self.path('publish'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | [
"def",
"publish",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"if",
"'data'",
"in",
"kwargs",
"and",
"'id'",
"not",
"in",
"kwargs",
"[",... | Helper for publishing an existing content view.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Helper",
"for",
"publishing",
"an",
"existing",
"content",
"view",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L2619-L2636 | train | 208,648 |
SatelliteQE/nailgun | nailgun/entities.py | ContentView.delete_from_environment | def delete_from_environment(self, environment, synchronous=True):
"""Delete this content view version from an environment.
This method acts much like
:meth:`nailgun.entity_mixins.EntityDeleteMixin.delete`. The
documentation on that method describes how the deletion procedure works
in general. This method differs only in accepting an ``environment``
parameter.
:param environment: A :class:`nailgun.entities.Environment` object. The
environment's ``id`` parameter *must* be specified. As a
convenience, an environment ID may be passed in instead of an
``Environment`` object.
"""
if isinstance(environment, Environment):
environment_id = environment.id
else:
environment_id = environment
response = client.delete(
'{0}/environments/{1}'.format(self.path(), environment_id),
**self._server_config.get_client_kwargs()
)
return _handle_response(response, self._server_config, synchronous) | python | def delete_from_environment(self, environment, synchronous=True):
"""Delete this content view version from an environment.
This method acts much like
:meth:`nailgun.entity_mixins.EntityDeleteMixin.delete`. The
documentation on that method describes how the deletion procedure works
in general. This method differs only in accepting an ``environment``
parameter.
:param environment: A :class:`nailgun.entities.Environment` object. The
environment's ``id`` parameter *must* be specified. As a
convenience, an environment ID may be passed in instead of an
``Environment`` object.
"""
if isinstance(environment, Environment):
environment_id = environment.id
else:
environment_id = environment
response = client.delete(
'{0}/environments/{1}'.format(self.path(), environment_id),
**self._server_config.get_client_kwargs()
)
return _handle_response(response, self._server_config, synchronous) | [
"def",
"delete_from_environment",
"(",
"self",
",",
"environment",
",",
"synchronous",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"environment",
",",
"Environment",
")",
":",
"environment_id",
"=",
"environment",
".",
"id",
"else",
":",
"environment_id",
... | Delete this content view version from an environment.
This method acts much like
:meth:`nailgun.entity_mixins.EntityDeleteMixin.delete`. The
documentation on that method describes how the deletion procedure works
in general. This method differs only in accepting an ``environment``
parameter.
:param environment: A :class:`nailgun.entities.Environment` object. The
environment's ``id`` parameter *must* be specified. As a
convenience, an environment ID may be passed in instead of an
``Environment`` object. | [
"Delete",
"this",
"content",
"view",
"version",
"from",
"an",
"environment",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L2674-L2697 | train | 208,649 |
SatelliteQE/nailgun | nailgun/entities.py | ContentViewComponent.add | def add(self, synchronous=True, **kwargs):
"""Add provided Content View Component.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
if 'data' not in kwargs:
# data is required
kwargs['data'] = dict()
if 'component_ids' not in kwargs['data']:
kwargs['data']['components'] = [_payload(self.get_fields(), self.get_values())]
kwargs.update(self._server_config.get_client_kwargs())
response = client.put(self.path('add'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | python | def add(self, synchronous=True, **kwargs):
"""Add provided Content View Component.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
if 'data' not in kwargs:
# data is required
kwargs['data'] = dict()
if 'component_ids' not in kwargs['data']:
kwargs['data']['components'] = [_payload(self.get_fields(), self.get_values())]
kwargs.update(self._server_config.get_client_kwargs())
response = client.put(self.path('add'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | [
"def",
"add",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"if",
"'data'",
"not",
"in",
"kwargs",
":",
"# data is required",
"kwargs",
"[",... | Add provided Content View Component.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Add",
"provided",
"Content",
"View",
"Component",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L2760-L2780 | train | 208,650 |
SatelliteQE/nailgun | nailgun/entities.py | Filter.read | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Deal with different named data returned from the server
"""
if attrs is None:
attrs = self.read_json()
attrs['override'] = attrs.pop('override?')
attrs['unlimited'] = attrs.pop('unlimited?')
return super(Filter, self).read(entity, attrs, ignore, params) | python | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Deal with different named data returned from the server
"""
if attrs is None:
attrs = self.read_json()
attrs['override'] = attrs.pop('override?')
attrs['unlimited'] = attrs.pop('unlimited?')
return super(Filter, self).read(entity, attrs, ignore, params) | [
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"attrs",
"is",
"None",
":",
"attrs",
"=",
"self",
".",
"read_json",
"(",
")",
"attrs",
"[",... | Deal with different named data returned from the server | [
"Deal",
"with",
"different",
"named",
"data",
"returned",
"from",
"the",
"server"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L3108-L3115 | train | 208,651 |
SatelliteQE/nailgun | nailgun/entities.py | ForemanTask.poll | def poll(self, poll_rate=None, timeout=None):
"""Return the status of a task or timeout.
There are several API calls that trigger asynchronous tasks, such as
synchronizing a repository, or publishing or promoting a content view.
It is possible to check on the status of a task if you know its UUID.
This method polls a task once every ``poll_rate`` seconds and, upon
task completion, returns information about that task.
:param poll_rate: Delay between the end of one task check-up and
the start of the next check-up. Defaults to
``nailgun.entity_mixins.TASK_POLL_RATE``.
:param timeout: Maximum number of seconds to wait until timing out.
Defaults to ``nailgun.entity_mixins.TASK_TIMEOUT``.
:returns: Information about the asynchronous task.
:raises: ``nailgun.entity_mixins.TaskTimedOutError`` if the task
completes with any result other than "success".
:raises: ``nailgun.entity_mixins.TaskFailedError`` if the task finishes
with any result other than "success".
:raises: ``requests.exceptions.HTTPError`` If the API returns a message
with an HTTP 4XX or 5XX status code.
"""
# See nailgun.entity_mixins._poll_task for an explanation of why a
# private method is called.
return _poll_task(
self.id, # pylint:disable=no-member
self._server_config,
poll_rate,
timeout
) | python | def poll(self, poll_rate=None, timeout=None):
"""Return the status of a task or timeout.
There are several API calls that trigger asynchronous tasks, such as
synchronizing a repository, or publishing or promoting a content view.
It is possible to check on the status of a task if you know its UUID.
This method polls a task once every ``poll_rate`` seconds and, upon
task completion, returns information about that task.
:param poll_rate: Delay between the end of one task check-up and
the start of the next check-up. Defaults to
``nailgun.entity_mixins.TASK_POLL_RATE``.
:param timeout: Maximum number of seconds to wait until timing out.
Defaults to ``nailgun.entity_mixins.TASK_TIMEOUT``.
:returns: Information about the asynchronous task.
:raises: ``nailgun.entity_mixins.TaskTimedOutError`` if the task
completes with any result other than "success".
:raises: ``nailgun.entity_mixins.TaskFailedError`` if the task finishes
with any result other than "success".
:raises: ``requests.exceptions.HTTPError`` If the API returns a message
with an HTTP 4XX or 5XX status code.
"""
# See nailgun.entity_mixins._poll_task for an explanation of why a
# private method is called.
return _poll_task(
self.id, # pylint:disable=no-member
self._server_config,
poll_rate,
timeout
) | [
"def",
"poll",
"(",
"self",
",",
"poll_rate",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"# See nailgun.entity_mixins._poll_task for an explanation of why a",
"# private method is called.",
"return",
"_poll_task",
"(",
"self",
".",
"id",
",",
"# pylint:disable=... | Return the status of a task or timeout.
There are several API calls that trigger asynchronous tasks, such as
synchronizing a repository, or publishing or promoting a content view.
It is possible to check on the status of a task if you know its UUID.
This method polls a task once every ``poll_rate`` seconds and, upon
task completion, returns information about that task.
:param poll_rate: Delay between the end of one task check-up and
the start of the next check-up. Defaults to
``nailgun.entity_mixins.TASK_POLL_RATE``.
:param timeout: Maximum number of seconds to wait until timing out.
Defaults to ``nailgun.entity_mixins.TASK_TIMEOUT``.
:returns: Information about the asynchronous task.
:raises: ``nailgun.entity_mixins.TaskTimedOutError`` if the task
completes with any result other than "success".
:raises: ``nailgun.entity_mixins.TaskFailedError`` if the task finishes
with any result other than "success".
:raises: ``requests.exceptions.HTTPError`` If the API returns a message
with an HTTP 4XX or 5XX status code. | [
"Return",
"the",
"status",
"of",
"a",
"task",
"or",
"timeout",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L3185-L3215 | train | 208,652 |
SatelliteQE/nailgun | nailgun/entities.py | HostGroup.read | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Deal with several bugs.
For more information, see:
* `Bugzilla #1235377
<https://bugzilla.redhat.com/show_bug.cgi?id=1235377>`_
* `Bugzilla #1235379
<https://bugzilla.redhat.com/show_bug.cgi?id=1235379>`_
* `Bugzilla #1450379
<https://bugzilla.redhat.com/show_bug.cgi?id=1450379>`_
"""
if ignore is None:
ignore = set()
ignore.add('root_pass')
ignore.add('kickstart_repository')
if attrs is None:
attrs = self.read_json()
attrs['parent_id'] = attrs.pop('ancestry') # either an ID or None
version = _get_version(self._server_config)
if version >= Version('6.1') and version < Version('6.2'):
# We cannot call `self.update_json([])`, as an ID might not be
# present on self. However, `attrs` is guaranteed to have an ID.
attrs2 = HostGroup(
self._server_config,
id=attrs['id']
).update_json([])
for attr in ('content_source_id',
'content_view_id',
'lifecycle_environment_id'):
attrs[attr] = attrs2.get(attr)
return super(HostGroup, self).read(entity, attrs, ignore, params) | python | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Deal with several bugs.
For more information, see:
* `Bugzilla #1235377
<https://bugzilla.redhat.com/show_bug.cgi?id=1235377>`_
* `Bugzilla #1235379
<https://bugzilla.redhat.com/show_bug.cgi?id=1235379>`_
* `Bugzilla #1450379
<https://bugzilla.redhat.com/show_bug.cgi?id=1450379>`_
"""
if ignore is None:
ignore = set()
ignore.add('root_pass')
ignore.add('kickstart_repository')
if attrs is None:
attrs = self.read_json()
attrs['parent_id'] = attrs.pop('ancestry') # either an ID or None
version = _get_version(self._server_config)
if version >= Version('6.1') and version < Version('6.2'):
# We cannot call `self.update_json([])`, as an ID might not be
# present on self. However, `attrs` is guaranteed to have an ID.
attrs2 = HostGroup(
self._server_config,
id=attrs['id']
).update_json([])
for attr in ('content_source_id',
'content_view_id',
'lifecycle_environment_id'):
attrs[attr] = attrs2.get(attr)
return super(HostGroup, self).read(entity, attrs, ignore, params) | [
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"ignore",
"is",
"None",
":",
"ignore",
"=",
"set",
"(",
")",
"ignore",
".",
"add",
"(",
"... | Deal with several bugs.
For more information, see:
* `Bugzilla #1235377
<https://bugzilla.redhat.com/show_bug.cgi?id=1235377>`_
* `Bugzilla #1235379
<https://bugzilla.redhat.com/show_bug.cgi?id=1235379>`_
* `Bugzilla #1450379
<https://bugzilla.redhat.com/show_bug.cgi?id=1450379>`_ | [
"Deal",
"with",
"several",
"bugs",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L3414-L3447 | train | 208,653 |
SatelliteQE/nailgun | nailgun/entities.py | HostGroup.delete_puppetclass | def delete_puppetclass(self, synchronous=True, **kwargs):
"""Remove a Puppet class from host group
Here is an example of how to use this method::
hostgroup.delete_puppetclass(data={'puppetclass_id': puppet.id})
Constructs path:
/api/hostgroups/:hostgroup_id/puppetclass_ids/:id
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy()
kwargs.update(self._server_config.get_client_kwargs())
path = "{0}/{1}".format(
self.path('puppetclass_ids'),
kwargs['data'].pop('puppetclass_id')
)
return _handle_response(
client.delete(path, **kwargs), self._server_config, synchronous) | python | def delete_puppetclass(self, synchronous=True, **kwargs):
"""Remove a Puppet class from host group
Here is an example of how to use this method::
hostgroup.delete_puppetclass(data={'puppetclass_id': puppet.id})
Constructs path:
/api/hostgroups/:hostgroup_id/puppetclass_ids/:id
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy()
kwargs.update(self._server_config.get_client_kwargs())
path = "{0}/{1}".format(
self.path('puppetclass_ids'),
kwargs['data'].pop('puppetclass_id')
)
return _handle_response(
client.delete(path, **kwargs), self._server_config, synchronous) | [
"def",
"delete_puppetclass",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",
"get_client_kwargs",
"(",
")",... | Remove a Puppet class from host group
Here is an example of how to use this method::
hostgroup.delete_puppetclass(data={'puppetclass_id': puppet.id})
Constructs path:
/api/hostgroups/:hostgroup_id/puppetclass_ids/:id
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Remove",
"a",
"Puppet",
"class",
"from",
"host",
"group"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L3516-L3541 | train | 208,654 |
SatelliteQE/nailgun | nailgun/entities.py | Host.owner_type | def owner_type(self, value):
"""Set ``owner_type`` to the given value.
In addition:
* Update the internal type of the ``owner`` field.
* Update the value of the ``owner`` field if a value is already set.
"""
self._owner_type = value
if value == 'User':
self._fields['owner'] = entity_fields.OneToOneField(User)
if hasattr(self, 'owner'):
# pylint:disable=no-member
self.owner = User(
self._server_config,
id=self.owner.id if isinstance(self.owner, Entity)
else self.owner
)
elif value == 'Usergroup':
self._fields['owner'] = entity_fields.OneToOneField(UserGroup)
if hasattr(self, 'owner'):
# pylint:disable=no-member
self.owner = UserGroup(
self._server_config,
id=self.owner.id if isinstance(self.owner, Entity)
else self.owner
) | python | def owner_type(self, value):
"""Set ``owner_type`` to the given value.
In addition:
* Update the internal type of the ``owner`` field.
* Update the value of the ``owner`` field if a value is already set.
"""
self._owner_type = value
if value == 'User':
self._fields['owner'] = entity_fields.OneToOneField(User)
if hasattr(self, 'owner'):
# pylint:disable=no-member
self.owner = User(
self._server_config,
id=self.owner.id if isinstance(self.owner, Entity)
else self.owner
)
elif value == 'Usergroup':
self._fields['owner'] = entity_fields.OneToOneField(UserGroup)
if hasattr(self, 'owner'):
# pylint:disable=no-member
self.owner = UserGroup(
self._server_config,
id=self.owner.id if isinstance(self.owner, Entity)
else self.owner
) | [
"def",
"owner_type",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"_owner_type",
"=",
"value",
"if",
"value",
"==",
"'User'",
":",
"self",
".",
"_fields",
"[",
"'owner'",
"]",
"=",
"entity_fields",
".",
"OneToOneField",
"(",
"User",
")",
"if",
"ha... | Set ``owner_type`` to the given value.
In addition:
* Update the internal type of the ``owner`` field.
* Update the value of the ``owner`` field if a value is already set. | [
"Set",
"owner_type",
"to",
"the",
"given",
"value",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L3774-L3800 | train | 208,655 |
SatelliteQE/nailgun | nailgun/entities.py | Host.get_values | def get_values(self):
"""Correctly set the ``owner_type`` attribute."""
attrs = super(Host, self).get_values()
if '_owner_type' in attrs and attrs['_owner_type'] is not None:
attrs['owner_type'] = attrs.pop('_owner_type')
else:
attrs.pop('_owner_type')
return attrs | python | def get_values(self):
"""Correctly set the ``owner_type`` attribute."""
attrs = super(Host, self).get_values()
if '_owner_type' in attrs and attrs['_owner_type'] is not None:
attrs['owner_type'] = attrs.pop('_owner_type')
else:
attrs.pop('_owner_type')
return attrs | [
"def",
"get_values",
"(",
"self",
")",
":",
"attrs",
"=",
"super",
"(",
"Host",
",",
"self",
")",
".",
"get_values",
"(",
")",
"if",
"'_owner_type'",
"in",
"attrs",
"and",
"attrs",
"[",
"'_owner_type'",
"]",
"is",
"not",
"None",
":",
"attrs",
"[",
"'... | Correctly set the ``owner_type`` attribute. | [
"Correctly",
"set",
"the",
"owner_type",
"attribute",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L3802-L3809 | train | 208,656 |
SatelliteQE/nailgun | nailgun/entities.py | Host.errata_applicability | def errata_applicability(self, synchronous=True, **kwargs):
"""Force regenerate errata applicability
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all content decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.put(self.path('errata/applicability'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | python | def errata_applicability(self, synchronous=True, **kwargs):
"""Force regenerate errata applicability
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all content decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.put(self.path('errata/applicability'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | [
"def",
"errata_applicability",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",... | Force regenerate errata applicability
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all content decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Force",
"regenerate",
"errata",
"applicability"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L4017-L4032 | train | 208,657 |
SatelliteQE/nailgun | nailgun/entities.py | Host.read | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Deal with oddly named and structured data returned by the server.
For more information, see `Bugzilla #1235019
<https://bugzilla.redhat.com/show_bug.cgi?id=1235019>`_
and `Bugzilla #1449749
<https://bugzilla.redhat.com/show_bug.cgi?id=1449749>`_.
`content_facet_attributes` are returned only in case any of facet
attributes were actually set.
Also add image to the response if needed, as
:meth:`nailgun.entity_mixins.EntityReadMixin.read` can't initialize
image.
"""
if attrs is None:
attrs = self.read_json()
if ignore is None:
ignore = set()
if 'parameters' in attrs:
attrs['host_parameters_attributes'] = attrs.pop('parameters')
else:
ignore.add('host_parameters_attributes')
if 'content_facet_attributes' not in attrs:
ignore.add('content_facet_attributes')
ignore.add('compute_attributes')
ignore.add('interfaces_attributes')
ignore.add('root_pass')
# Image entity requires compute_resource_id to initialize as it is
# part of its path. The thing is that entity_mixins.read() initializes
# entities by id only.
# Workaround is to add image to ignore, call entity_mixins.read()
# and then add 'manually' initialized image to the result.
# If image_id is None set image to None as it is done by default.
ignore.add('image')
# host id is required for interface initialization
ignore.add('interface')
ignore.add('build_status_label')
result = super(Host, self).read(entity, attrs, ignore, params)
if attrs.get('image_id'):
result.image = Image(
server_config=self._server_config,
id=attrs.get('image_id'),
compute_resource=attrs.get('compute_resource_id'),
)
else:
result.image = None
if 'interfaces' in attrs and attrs['interfaces']:
result.interface = [
Interface(
self._server_config,
host=result.id,
id=interface['id'],
)
for interface in attrs['interfaces']
]
if 'build_status_label' in attrs:
result.build_status_label = attrs['build_status_label']
return result | python | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Deal with oddly named and structured data returned by the server.
For more information, see `Bugzilla #1235019
<https://bugzilla.redhat.com/show_bug.cgi?id=1235019>`_
and `Bugzilla #1449749
<https://bugzilla.redhat.com/show_bug.cgi?id=1449749>`_.
`content_facet_attributes` are returned only in case any of facet
attributes were actually set.
Also add image to the response if needed, as
:meth:`nailgun.entity_mixins.EntityReadMixin.read` can't initialize
image.
"""
if attrs is None:
attrs = self.read_json()
if ignore is None:
ignore = set()
if 'parameters' in attrs:
attrs['host_parameters_attributes'] = attrs.pop('parameters')
else:
ignore.add('host_parameters_attributes')
if 'content_facet_attributes' not in attrs:
ignore.add('content_facet_attributes')
ignore.add('compute_attributes')
ignore.add('interfaces_attributes')
ignore.add('root_pass')
# Image entity requires compute_resource_id to initialize as it is
# part of its path. The thing is that entity_mixins.read() initializes
# entities by id only.
# Workaround is to add image to ignore, call entity_mixins.read()
# and then add 'manually' initialized image to the result.
# If image_id is None set image to None as it is done by default.
ignore.add('image')
# host id is required for interface initialization
ignore.add('interface')
ignore.add('build_status_label')
result = super(Host, self).read(entity, attrs, ignore, params)
if attrs.get('image_id'):
result.image = Image(
server_config=self._server_config,
id=attrs.get('image_id'),
compute_resource=attrs.get('compute_resource_id'),
)
else:
result.image = None
if 'interfaces' in attrs and attrs['interfaces']:
result.interface = [
Interface(
self._server_config,
host=result.id,
id=interface['id'],
)
for interface in attrs['interfaces']
]
if 'build_status_label' in attrs:
result.build_status_label = attrs['build_status_label']
return result | [
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"attrs",
"is",
"None",
":",
"attrs",
"=",
"self",
".",
"read_json",
"(",
")",
"if",
"ignore... | Deal with oddly named and structured data returned by the server.
For more information, see `Bugzilla #1235019
<https://bugzilla.redhat.com/show_bug.cgi?id=1235019>`_
and `Bugzilla #1449749
<https://bugzilla.redhat.com/show_bug.cgi?id=1449749>`_.
`content_facet_attributes` are returned only in case any of facet
attributes were actually set.
Also add image to the response if needed, as
:meth:`nailgun.entity_mixins.EntityReadMixin.read` can't initialize
image. | [
"Deal",
"with",
"oddly",
"named",
"and",
"structured",
"data",
"returned",
"by",
"the",
"server",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L4102-L4160 | train | 208,658 |
SatelliteQE/nailgun | nailgun/entities.py | LifecycleEnvironment.create_payload | def create_payload(self):
"""Rename the payload key "prior_id" to "prior".
For more information, see `Bugzilla #1238757
<https://bugzilla.redhat.com/show_bug.cgi?id=1238757>`_.
"""
payload = super(LifecycleEnvironment, self).create_payload()
if (_get_version(self._server_config) < Version('6.1') and
'prior_id' in payload):
payload['prior'] = payload.pop('prior_id')
return payload | python | def create_payload(self):
"""Rename the payload key "prior_id" to "prior".
For more information, see `Bugzilla #1238757
<https://bugzilla.redhat.com/show_bug.cgi?id=1238757>`_.
"""
payload = super(LifecycleEnvironment, self).create_payload()
if (_get_version(self._server_config) < Version('6.1') and
'prior_id' in payload):
payload['prior'] = payload.pop('prior_id')
return payload | [
"def",
"create_payload",
"(",
"self",
")",
":",
"payload",
"=",
"super",
"(",
"LifecycleEnvironment",
",",
"self",
")",
".",
"create_payload",
"(",
")",
"if",
"(",
"_get_version",
"(",
"self",
".",
"_server_config",
")",
"<",
"Version",
"(",
"'6.1'",
")",
... | Rename the payload key "prior_id" to "prior".
For more information, see `Bugzilla #1238757
<https://bugzilla.redhat.com/show_bug.cgi?id=1238757>`_. | [
"Rename",
"the",
"payload",
"key",
"prior_id",
"to",
"prior",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L4588-L4599 | train | 208,659 |
SatelliteQE/nailgun | nailgun/entities.py | LifecycleEnvironment.create_missing | def create_missing(self):
"""Automatically populate additional instance attributes.
When a new lifecycle environment is created, it must either:
* Reference a parent lifecycle environment in the tree of lifecycle
environments via the ``prior`` field, or
* have a name of "Library".
Within a given organization, there can only be a single lifecycle
environment with a name of 'Library'. This lifecycle environment is at
the root of a tree of lifecycle environments, so its ``prior`` field is
blank.
This method finds the 'Library' lifecycle environment within the
current organization and points to it via the ``prior`` field. This is
not done if the current lifecycle environment has a name of 'Library'.
"""
# We call `super` first b/c it populates `self.organization`, and we
# need that field to perform a search a little later.
super(LifecycleEnvironment, self).create_missing()
if (self.name != 'Library' and # pylint:disable=no-member
not hasattr(self, 'prior')):
results = self.search({'organization'}, {u'name': u'Library'})
if len(results) != 1:
raise APIResponseError(
u'Could not find the "Library" lifecycle environment for '
u'organization {0}. Search results: {1}'
.format(self.organization, results) # pylint:disable=E1101
)
self.prior = results[0] | python | def create_missing(self):
"""Automatically populate additional instance attributes.
When a new lifecycle environment is created, it must either:
* Reference a parent lifecycle environment in the tree of lifecycle
environments via the ``prior`` field, or
* have a name of "Library".
Within a given organization, there can only be a single lifecycle
environment with a name of 'Library'. This lifecycle environment is at
the root of a tree of lifecycle environments, so its ``prior`` field is
blank.
This method finds the 'Library' lifecycle environment within the
current organization and points to it via the ``prior`` field. This is
not done if the current lifecycle environment has a name of 'Library'.
"""
# We call `super` first b/c it populates `self.organization`, and we
# need that field to perform a search a little later.
super(LifecycleEnvironment, self).create_missing()
if (self.name != 'Library' and # pylint:disable=no-member
not hasattr(self, 'prior')):
results = self.search({'organization'}, {u'name': u'Library'})
if len(results) != 1:
raise APIResponseError(
u'Could not find the "Library" lifecycle environment for '
u'organization {0}. Search results: {1}'
.format(self.organization, results) # pylint:disable=E1101
)
self.prior = results[0] | [
"def",
"create_missing",
"(",
"self",
")",
":",
"# We call `super` first b/c it populates `self.organization`, and we",
"# need that field to perform a search a little later.",
"super",
"(",
"LifecycleEnvironment",
",",
"self",
")",
".",
"create_missing",
"(",
")",
"if",
"(",
... | Automatically populate additional instance attributes.
When a new lifecycle environment is created, it must either:
* Reference a parent lifecycle environment in the tree of lifecycle
environments via the ``prior`` field, or
* have a name of "Library".
Within a given organization, there can only be a single lifecycle
environment with a name of 'Library'. This lifecycle environment is at
the root of a tree of lifecycle environments, so its ``prior`` field is
blank.
This method finds the 'Library' lifecycle environment within the
current organization and points to it via the ``prior`` field. This is
not done if the current lifecycle environment has a name of 'Library'. | [
"Automatically",
"populate",
"additional",
"instance",
"attributes",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L4601-L4632 | train | 208,660 |
SatelliteQE/nailgun | nailgun/entities.py | Media.create_payload | def create_payload(self):
"""Wrap submitted data within an extra dict and rename ``path_``.
For more information on wrapping submitted data, see `Bugzilla #1151220
<https://bugzilla.redhat.com/show_bug.cgi?id=1151220>`_.
"""
payload = super(Media, self).create_payload()
if 'path_' in payload:
payload['path'] = payload.pop('path_')
return {u'medium': payload} | python | def create_payload(self):
"""Wrap submitted data within an extra dict and rename ``path_``.
For more information on wrapping submitted data, see `Bugzilla #1151220
<https://bugzilla.redhat.com/show_bug.cgi?id=1151220>`_.
"""
payload = super(Media, self).create_payload()
if 'path_' in payload:
payload['path'] = payload.pop('path_')
return {u'medium': payload} | [
"def",
"create_payload",
"(",
"self",
")",
":",
"payload",
"=",
"super",
"(",
"Media",
",",
"self",
")",
".",
"create_payload",
"(",
")",
"if",
"'path_'",
"in",
"payload",
":",
"payload",
"[",
"'path'",
"]",
"=",
"payload",
".",
"pop",
"(",
"'path_'",
... | Wrap submitted data within an extra dict and rename ``path_``.
For more information on wrapping submitted data, see `Bugzilla #1151220
<https://bugzilla.redhat.com/show_bug.cgi?id=1151220>`_. | [
"Wrap",
"submitted",
"data",
"within",
"an",
"extra",
"dict",
"and",
"rename",
"path_",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L4756-L4766 | train | 208,661 |
SatelliteQE/nailgun | nailgun/entities.py | OverrideValue.create_payload | def create_payload(self):
"""Remove ``smart_class_parameter_id`` or ``smart_variable_id``"""
payload = super(OverrideValue, self).create_payload()
if hasattr(self, 'smart_class_parameter'):
del payload['smart_class_parameter_id']
if hasattr(self, 'smart_variable'):
del payload['smart_variable_id']
return payload | python | def create_payload(self):
"""Remove ``smart_class_parameter_id`` or ``smart_variable_id``"""
payload = super(OverrideValue, self).create_payload()
if hasattr(self, 'smart_class_parameter'):
del payload['smart_class_parameter_id']
if hasattr(self, 'smart_variable'):
del payload['smart_variable_id']
return payload | [
"def",
"create_payload",
"(",
"self",
")",
":",
"payload",
"=",
"super",
"(",
"OverrideValue",
",",
"self",
")",
".",
"create_payload",
"(",
")",
"if",
"hasattr",
"(",
"self",
",",
"'smart_class_parameter'",
")",
":",
"del",
"payload",
"[",
"'smart_class_par... | Remove ``smart_class_parameter_id`` or ``smart_variable_id`` | [
"Remove",
"smart_class_parameter_id",
"or",
"smart_variable_id"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L5220-L5227 | train | 208,662 |
SatelliteQE/nailgun | nailgun/entities.py | Parameter.read | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Ignore path related fields as they're never returned by the server
and are only added to entity to be able to use proper path.
"""
if entity is None:
entity = type(self)(
self._server_config,
**{self._parent_type: self._parent_id}
)
if ignore is None:
ignore = set()
for field_name in self._path_fields:
ignore.add(field_name)
return super(Parameter, self).read(entity, attrs, ignore, params) | python | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Ignore path related fields as they're never returned by the server
and are only added to entity to be able to use proper path.
"""
if entity is None:
entity = type(self)(
self._server_config,
**{self._parent_type: self._parent_id}
)
if ignore is None:
ignore = set()
for field_name in self._path_fields:
ignore.add(field_name)
return super(Parameter, self).read(entity, attrs, ignore, params) | [
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"entity",
"is",
"None",
":",
"entity",
"=",
"type",
"(",
"self",
")",
"(",
"self",
".",
"... | Ignore path related fields as they're never returned by the server
and are only added to entity to be able to use proper path. | [
"Ignore",
"path",
"related",
"fields",
"as",
"they",
"re",
"never",
"returned",
"by",
"the",
"server",
"and",
"are",
"only",
"added",
"to",
"entity",
"to",
"be",
"able",
"to",
"use",
"proper",
"path",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L5312-L5325 | train | 208,663 |
SatelliteQE/nailgun | nailgun/entities.py | Product.search | def search(self, fields=None, query=None, filters=None):
"""Search for entities with missing attribute
:param fields: A set naming which fields should be used when generating
a search query. If ``None``, all values on the entity are used. If
an empty set, no values are used.
:param query: A dict containing a raw search query. This is melded in
to the generated search query like so: ``{generated:
query}.update({manual: query})``.
:param filters: A dict. Used to filter search results locally.
:return: A list of entities, all of type ``type(self)``.
For more information, see `Bugzilla #1237283
<https://bugzilla.redhat.com/show_bug.cgi?id=1237283>`_ and
`nailgun#261 <https://github.com/SatelliteQE/nailgun/issues/261>`_.
"""
results = self.search_json(fields, query)['results']
results = self.search_normalize(results)
entities = []
for result in results:
sync_plan = result.get('sync_plan')
if sync_plan is not None:
del result['sync_plan']
entity = type(self)(self._server_config, **result)
if sync_plan:
entity.sync_plan = SyncPlan(
server_config=self._server_config,
id=sync_plan,
organization=Organization(
server_config=self._server_config,
id=result.get('organization')
),
)
entities.append(entity)
if filters is not None:
entities = self.search_filter(entities, filters)
return entities | python | def search(self, fields=None, query=None, filters=None):
"""Search for entities with missing attribute
:param fields: A set naming which fields should be used when generating
a search query. If ``None``, all values on the entity are used. If
an empty set, no values are used.
:param query: A dict containing a raw search query. This is melded in
to the generated search query like so: ``{generated:
query}.update({manual: query})``.
:param filters: A dict. Used to filter search results locally.
:return: A list of entities, all of type ``type(self)``.
For more information, see `Bugzilla #1237283
<https://bugzilla.redhat.com/show_bug.cgi?id=1237283>`_ and
`nailgun#261 <https://github.com/SatelliteQE/nailgun/issues/261>`_.
"""
results = self.search_json(fields, query)['results']
results = self.search_normalize(results)
entities = []
for result in results:
sync_plan = result.get('sync_plan')
if sync_plan is not None:
del result['sync_plan']
entity = type(self)(self._server_config, **result)
if sync_plan:
entity.sync_plan = SyncPlan(
server_config=self._server_config,
id=sync_plan,
organization=Organization(
server_config=self._server_config,
id=result.get('organization')
),
)
entities.append(entity)
if filters is not None:
entities = self.search_filter(entities, filters)
return entities | [
"def",
"search",
"(",
"self",
",",
"fields",
"=",
"None",
",",
"query",
"=",
"None",
",",
"filters",
"=",
"None",
")",
":",
"results",
"=",
"self",
".",
"search_json",
"(",
"fields",
",",
"query",
")",
"[",
"'results'",
"]",
"results",
"=",
"self",
... | Search for entities with missing attribute
:param fields: A set naming which fields should be used when generating
a search query. If ``None``, all values on the entity are used. If
an empty set, no values are used.
:param query: A dict containing a raw search query. This is melded in
to the generated search query like so: ``{generated:
query}.update({manual: query})``.
:param filters: A dict. Used to filter search results locally.
:return: A list of entities, all of type ``type(self)``.
For more information, see `Bugzilla #1237283
<https://bugzilla.redhat.com/show_bug.cgi?id=1237283>`_ and
`nailgun#261 <https://github.com/SatelliteQE/nailgun/issues/261>`_. | [
"Search",
"for",
"entities",
"with",
"missing",
"attribute"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L5444-L5480 | train | 208,664 |
SatelliteQE/nailgun | nailgun/entities.py | Registry.read | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Do not read the ``password`` argument."""
if attrs is None:
attrs = self.read_json()
if ignore is None:
ignore = set()
ignore.add('password')
return super(Registry, self).read(entity, attrs, ignore, params) | python | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Do not read the ``password`` argument."""
if attrs is None:
attrs = self.read_json()
if ignore is None:
ignore = set()
ignore.add('password')
return super(Registry, self).read(entity, attrs, ignore, params) | [
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"attrs",
"is",
"None",
":",
"attrs",
"=",
"self",
".",
"read_json",
"(",
")",
"if",
"ignore... | Do not read the ``password`` argument. | [
"Do",
"not",
"read",
"the",
"password",
"argument",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L5886-L5893 | train | 208,665 |
SatelliteQE/nailgun | nailgun/entities.py | Repository.create_missing | def create_missing(self):
"""Conditionally mark ``docker_upstream_name`` as required.
Mark ``docker_upstream_name`` as required if ``content_type`` is
"docker".
"""
if getattr(self, 'content_type', '') == 'docker':
self._fields['docker_upstream_name'].required = True
super(Repository, self).create_missing() | python | def create_missing(self):
"""Conditionally mark ``docker_upstream_name`` as required.
Mark ``docker_upstream_name`` as required if ``content_type`` is
"docker".
"""
if getattr(self, 'content_type', '') == 'docker':
self._fields['docker_upstream_name'].required = True
super(Repository, self).create_missing() | [
"def",
"create_missing",
"(",
"self",
")",
":",
"if",
"getattr",
"(",
"self",
",",
"'content_type'",
",",
"''",
")",
"==",
"'docker'",
":",
"self",
".",
"_fields",
"[",
"'docker_upstream_name'",
"]",
".",
"required",
"=",
"True",
"super",
"(",
"Repository"... | Conditionally mark ``docker_upstream_name`` as required.
Mark ``docker_upstream_name`` as required if ``content_type`` is
"docker". | [
"Conditionally",
"mark",
"docker_upstream_name",
"as",
"required",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L6049-L6058 | train | 208,666 |
SatelliteQE/nailgun | nailgun/entities.py | Repository.upload_content | def upload_content(self, synchronous=True, **kwargs):
"""Upload a file or files to the current repository.
Here is an example of how to upload content::
with open('my_content.rpm') as content:
repo.upload_content(files={'content': content})
This method accepts the same keyword arguments as Requests. As a
result, the following examples can be adapted for use here:
* `POST a Multipart-Encoded File`_
* `POST Multiple Multipart-Encoded Files`_
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
:raises nailgun.entities.APIResponseError: If the response has a status
other than "success".
.. _POST a Multipart-Encoded File:
http://docs.python-requests.org/en/latest/user/quickstart/#post-a-multipart-encoded-file
.. _POST Multiple Multipart-Encoded Files:
http://docs.python-requests.org/en/latest/user/advanced/#post-multiple-multipart-encoded-files
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.post(self.path('upload_content'), **kwargs)
json = _handle_response(response, self._server_config, synchronous)
if json['status'] != 'success':
raise APIResponseError(
# pylint:disable=no-member
'Received error when uploading file {0} to repository {1}: {2}'
.format(kwargs.get('files'), self.id, json)
)
return json | python | def upload_content(self, synchronous=True, **kwargs):
"""Upload a file or files to the current repository.
Here is an example of how to upload content::
with open('my_content.rpm') as content:
repo.upload_content(files={'content': content})
This method accepts the same keyword arguments as Requests. As a
result, the following examples can be adapted for use here:
* `POST a Multipart-Encoded File`_
* `POST Multiple Multipart-Encoded Files`_
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
:raises nailgun.entities.APIResponseError: If the response has a status
other than "success".
.. _POST a Multipart-Encoded File:
http://docs.python-requests.org/en/latest/user/quickstart/#post-a-multipart-encoded-file
.. _POST Multiple Multipart-Encoded Files:
http://docs.python-requests.org/en/latest/user/advanced/#post-multiple-multipart-encoded-files
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.post(self.path('upload_content'), **kwargs)
json = _handle_response(response, self._server_config, synchronous)
if json['status'] != 'success':
raise APIResponseError(
# pylint:disable=no-member
'Received error when uploading file {0} to repository {1}: {2}'
.format(kwargs.get('files'), self.id, json)
)
return json | [
"def",
"upload_content",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",
"ge... | Upload a file or files to the current repository.
Here is an example of how to upload content::
with open('my_content.rpm') as content:
repo.upload_content(files={'content': content})
This method accepts the same keyword arguments as Requests. As a
result, the following examples can be adapted for use here:
* `POST a Multipart-Encoded File`_
* `POST Multiple Multipart-Encoded Files`_
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
:raises nailgun.entities.APIResponseError: If the response has a status
other than "success".
.. _POST a Multipart-Encoded File:
http://docs.python-requests.org/en/latest/user/quickstart/#post-a-multipart-encoded-file
.. _POST Multiple Multipart-Encoded Files:
http://docs.python-requests.org/en/latest/user/advanced/#post-multiple-multipart-encoded-files | [
"Upload",
"a",
"file",
"or",
"files",
"to",
"the",
"current",
"repository",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L6094-L6134 | train | 208,667 |
SatelliteQE/nailgun | nailgun/entities.py | Repository.import_uploads | def import_uploads(self, uploads=None, upload_ids=None, synchronous=True,
**kwargs):
"""Import uploads into a repository
It expects either a list of uploads or upload_ids (but not both).
:param uploads: Array of uploads to be imported
:param upload_ids: Array of upload ids to be imported
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
if uploads:
data = {'uploads': uploads}
elif upload_ids:
data = {'upload_ids': upload_ids}
response = client.put(self.path('import_uploads'), data, **kwargs)
json = _handle_response(response, self._server_config, synchronous)
return json | python | def import_uploads(self, uploads=None, upload_ids=None, synchronous=True,
**kwargs):
"""Import uploads into a repository
It expects either a list of uploads or upload_ids (but not both).
:param uploads: Array of uploads to be imported
:param upload_ids: Array of upload ids to be imported
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
if uploads:
data = {'uploads': uploads}
elif upload_ids:
data = {'upload_ids': upload_ids}
response = client.put(self.path('import_uploads'), data, **kwargs)
json = _handle_response(response, self._server_config, synchronous)
return json | [
"def",
"import_uploads",
"(",
"self",
",",
"uploads",
"=",
"None",
",",
"upload_ids",
"=",
"None",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"kwargs"... | Import uploads into a repository
It expects either a list of uploads or upload_ids (but not both).
:param uploads: Array of uploads to be imported
:param upload_ids: Array of upload ids to be imported
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Import",
"uploads",
"into",
"a",
"repository"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L6136-L6161 | train | 208,668 |
SatelliteQE/nailgun | nailgun/entities.py | RepositorySet.available_repositories | def available_repositories(self, **kwargs):
"""Lists available repositories for the repository set
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
if 'data' not in kwargs:
kwargs['data'] = dict()
kwargs['data']['product_id'] = self.product.id
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.get(self.path('available_repositories'), **kwargs)
return _handle_response(response, self._server_config) | python | def available_repositories(self, **kwargs):
"""Lists available repositories for the repository set
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
if 'data' not in kwargs:
kwargs['data'] = dict()
kwargs['data']['product_id'] = self.product.id
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.get(self.path('available_repositories'), **kwargs)
return _handle_response(response, self._server_config) | [
"def",
"available_repositories",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'data'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'data'",
"]",
"=",
"dict",
"(",
")",
"kwargs",
"[",
"'data'",
"]",
"[",
"'product_id'",
"]",
"=",
"self",
"."... | Lists available repositories for the repository set
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Lists",
"available",
"repositories",
"for",
"the",
"repository",
"set"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L6289-L6307 | train | 208,669 |
SatelliteQE/nailgun | nailgun/entities.py | RepositorySet.enable | def enable(self, synchronous=True, **kwargs):
"""Enables the RedHat Repository
RedHat Repos needs to be enabled first, so that we can sync it.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
if 'data' not in kwargs:
kwargs['data'] = dict()
kwargs['data']['product_id'] = self.product.id
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.put(self.path('enable'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | python | def enable(self, synchronous=True, **kwargs):
"""Enables the RedHat Repository
RedHat Repos needs to be enabled first, so that we can sync it.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
if 'data' not in kwargs:
kwargs['data'] = dict()
kwargs['data']['product_id'] = self.product.id
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.put(self.path('enable'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | [
"def",
"enable",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'data'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'data'",
"]",
"=",
"dict",
"(",
")",
"kwargs",
"[",
"'data'",
"]",
"[",
"'product_id'",
"]... | Enables the RedHat Repository
RedHat Repos needs to be enabled first, so that we can sync it.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Enables",
"the",
"RedHat",
"Repository"
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L6309-L6329 | train | 208,670 |
SatelliteQE/nailgun | nailgun/entities.py | SmartProxy.import_puppetclasses | def import_puppetclasses(self, synchronous=True, **kwargs):
"""Import puppet classes from puppet Capsule.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy()
kwargs.update(self._server_config.get_client_kwargs())
# Check if environment_id was sent and substitute it to the path
# but do not pass it to requests
if 'environment' in kwargs:
if isinstance(kwargs['environment'], Environment):
environment_id = kwargs.pop('environment').id
else:
environment_id = kwargs.pop('environment')
path = '{0}/environments/{1}/import_puppetclasses'.format(
self.path(), environment_id)
else:
path = '{0}/import_puppetclasses'.format(self.path())
return _handle_response(
client.post(path, **kwargs), self._server_config, synchronous) | python | def import_puppetclasses(self, synchronous=True, **kwargs):
"""Import puppet classes from puppet Capsule.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy()
kwargs.update(self._server_config.get_client_kwargs())
# Check if environment_id was sent and substitute it to the path
# but do not pass it to requests
if 'environment' in kwargs:
if isinstance(kwargs['environment'], Environment):
environment_id = kwargs.pop('environment').id
else:
environment_id = kwargs.pop('environment')
path = '{0}/environments/{1}/import_puppetclasses'.format(
self.path(), environment_id)
else:
path = '{0}/import_puppetclasses'.format(self.path())
return _handle_response(
client.post(path, **kwargs), self._server_config, synchronous) | [
"def",
"import_puppetclasses",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",
"get_client_kwargs",
"(",
")... | Import puppet classes from puppet Capsule.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Import",
"puppet",
"classes",
"from",
"puppet",
"Capsule",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L6680-L6706 | train | 208,671 |
SatelliteQE/nailgun | nailgun/entities.py | Subscription._org_path | def _org_path(self, which, payload):
"""A helper method for generating paths with organization IDs in them.
:param which: A path such as "manifest_history" that has an
organization ID in it.
:param payload: A dict with an "organization_id" key in it.
:returns: A string. The requested path.
"""
return Subscription(
self._server_config,
organization=payload['organization_id'],
).path(which) | python | def _org_path(self, which, payload):
"""A helper method for generating paths with organization IDs in them.
:param which: A path such as "manifest_history" that has an
organization ID in it.
:param payload: A dict with an "organization_id" key in it.
:returns: A string. The requested path.
"""
return Subscription(
self._server_config,
organization=payload['organization_id'],
).path(which) | [
"def",
"_org_path",
"(",
"self",
",",
"which",
",",
"payload",
")",
":",
"return",
"Subscription",
"(",
"self",
".",
"_server_config",
",",
"organization",
"=",
"payload",
"[",
"'organization_id'",
"]",
",",
")",
".",
"path",
"(",
"which",
")"
] | A helper method for generating paths with organization IDs in them.
:param which: A path such as "manifest_history" that has an
organization ID in it.
:param payload: A dict with an "organization_id" key in it.
:returns: A string. The requested path. | [
"A",
"helper",
"method",
"for",
"generating",
"paths",
"with",
"organization",
"IDs",
"in",
"them",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L7071-L7083 | train | 208,672 |
SatelliteQE/nailgun | nailgun/entities.py | Subscription.manifest_history | def manifest_history(self, synchronous=True, **kwargs):
"""Obtain manifest history for subscriptions.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.get(
self._org_path('manifest_history', kwargs['data']),
**kwargs
)
return _handle_response(response, self._server_config, synchronous) | python | def manifest_history(self, synchronous=True, **kwargs):
"""Obtain manifest history for subscriptions.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.get(
self._org_path('manifest_history', kwargs['data']),
**kwargs
)
return _handle_response(response, self._server_config, synchronous) | [
"def",
"manifest_history",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",
"... | Obtain manifest history for subscriptions.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Obtain",
"manifest",
"history",
"for",
"subscriptions",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L7110-L7128 | train | 208,673 |
SatelliteQE/nailgun | nailgun/entities.py | Subscription.read | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Ignore ``organization`` field as it's never returned by the server
and is only added to entity to be able to use organization path
dependent helpers.
"""
if ignore is None:
ignore = set()
ignore.add('organization')
return super(Subscription, self).read(entity, attrs, ignore, params) | python | def read(self, entity=None, attrs=None, ignore=None, params=None):
"""Ignore ``organization`` field as it's never returned by the server
and is only added to entity to be able to use organization path
dependent helpers.
"""
if ignore is None:
ignore = set()
ignore.add('organization')
return super(Subscription, self).read(entity, attrs, ignore, params) | [
"def",
"read",
"(",
"self",
",",
"entity",
"=",
"None",
",",
"attrs",
"=",
"None",
",",
"ignore",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"if",
"ignore",
"is",
"None",
":",
"ignore",
"=",
"set",
"(",
")",
"ignore",
".",
"add",
"(",
"... | Ignore ``organization`` field as it's never returned by the server
and is only added to entity to be able to use organization path
dependent helpers. | [
"Ignore",
"organization",
"field",
"as",
"it",
"s",
"never",
"returned",
"by",
"the",
"server",
"and",
"is",
"only",
"added",
"to",
"entity",
"to",
"be",
"able",
"to",
"use",
"organization",
"path",
"dependent",
"helpers",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L7130-L7138 | train | 208,674 |
SatelliteQE/nailgun | nailgun/entities.py | Subscription.refresh_manifest | def refresh_manifest(self, synchronous=True, **kwargs):
"""Refresh previously imported manifest for Red Hat provider.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.put(
self._org_path('refresh_manifest', kwargs['data']),
**kwargs
)
return _handle_response(
response,
self._server_config,
synchronous,
timeout=1500,
) | python | def refresh_manifest(self, synchronous=True, **kwargs):
"""Refresh previously imported manifest for Red Hat provider.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.put(
self._org_path('refresh_manifest', kwargs['data']),
**kwargs
)
return _handle_response(
response,
self._server_config,
synchronous,
timeout=1500,
) | [
"def",
"refresh_manifest",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",
"... | Refresh previously imported manifest for Red Hat provider.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Refresh",
"previously",
"imported",
"manifest",
"for",
"Red",
"Hat",
"provider",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L7140-L7163 | train | 208,675 |
SatelliteQE/nailgun | nailgun/entities.py | Subscription.upload | def upload(self, synchronous=True, **kwargs):
"""Upload a subscription manifest.
Here is an example of how to use this method::
with open('my_manifest.zip') as manifest:
sub.upload({'organization_id': org.id}, manifest)
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.post(
self._org_path('upload', kwargs['data']),
**kwargs
)
# Setting custom timeout as manifest upload can take enormously huge
# amount of time. See BZ#1339696 for more details
return _handle_response(
response,
self._server_config,
synchronous,
timeout=1500,
) | python | def upload(self, synchronous=True, **kwargs):
"""Upload a subscription manifest.
Here is an example of how to use this method::
with open('my_manifest.zip') as manifest:
sub.upload({'organization_id': org.id}, manifest)
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.post(
self._org_path('upload', kwargs['data']),
**kwargs
)
# Setting custom timeout as manifest upload can take enormously huge
# amount of time. See BZ#1339696 for more details
return _handle_response(
response,
self._server_config,
synchronous,
timeout=1500,
) | [
"def",
"upload",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",
"get_client... | Upload a subscription manifest.
Here is an example of how to use this method::
with open('my_manifest.zip') as manifest:
sub.upload({'organization_id': org.id}, manifest)
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Upload",
"a",
"subscription",
"manifest",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L7165-L7195 | train | 208,676 |
SatelliteQE/nailgun | nailgun/entities.py | SyncPlan.create_payload | def create_payload(self):
"""Convert ``sync_date`` to a string.
The ``sync_date`` instance attribute on the current object is not
affected. However, the ``'sync_date'`` key in the dict returned by
``create_payload`` is a string.
"""
data = super(SyncPlan, self).create_payload()
if isinstance(data.get('sync_date'), datetime):
data['sync_date'] = data['sync_date'].strftime('%Y-%m-%d %H:%M:%S')
return data | python | def create_payload(self):
"""Convert ``sync_date`` to a string.
The ``sync_date`` instance attribute on the current object is not
affected. However, the ``'sync_date'`` key in the dict returned by
``create_payload`` is a string.
"""
data = super(SyncPlan, self).create_payload()
if isinstance(data.get('sync_date'), datetime):
data['sync_date'] = data['sync_date'].strftime('%Y-%m-%d %H:%M:%S')
return data | [
"def",
"create_payload",
"(",
"self",
")",
":",
"data",
"=",
"super",
"(",
"SyncPlan",
",",
"self",
")",
".",
"create_payload",
"(",
")",
"if",
"isinstance",
"(",
"data",
".",
"get",
"(",
"'sync_date'",
")",
",",
"datetime",
")",
":",
"data",
"[",
"'... | Convert ``sync_date`` to a string.
The ``sync_date`` instance attribute on the current object is not
affected. However, the ``'sync_date'`` key in the dict returned by
``create_payload`` is a string. | [
"Convert",
"sync_date",
"to",
"a",
"string",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L7273-L7284 | train | 208,677 |
SatelliteQE/nailgun | nailgun/entities.py | SyncPlan.update_payload | def update_payload(self, fields=None):
"""Convert ``sync_date`` to a string if datetime object provided."""
data = super(SyncPlan, self).update_payload(fields)
if isinstance(data.get('sync_date'), datetime):
data['sync_date'] = data['sync_date'].strftime('%Y-%m-%d %H:%M:%S')
return data | python | def update_payload(self, fields=None):
"""Convert ``sync_date`` to a string if datetime object provided."""
data = super(SyncPlan, self).update_payload(fields)
if isinstance(data.get('sync_date'), datetime):
data['sync_date'] = data['sync_date'].strftime('%Y-%m-%d %H:%M:%S')
return data | [
"def",
"update_payload",
"(",
"self",
",",
"fields",
"=",
"None",
")",
":",
"data",
"=",
"super",
"(",
"SyncPlan",
",",
"self",
")",
".",
"update_payload",
"(",
"fields",
")",
"if",
"isinstance",
"(",
"data",
".",
"get",
"(",
"'sync_date'",
")",
",",
... | Convert ``sync_date`` to a string if datetime object provided. | [
"Convert",
"sync_date",
"to",
"a",
"string",
"if",
"datetime",
"object",
"provided",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L7346-L7351 | train | 208,678 |
SatelliteQE/nailgun | nailgun/entities.py | VirtWhoConfig.deploy_script | def deploy_script(self, synchronous=True, **kwargs):
"""Helper for Config's deploy_script method.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.get(self.path('deploy_script'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | python | def deploy_script(self, synchronous=True, **kwargs):
"""Helper for Config's deploy_script method.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy() # shadow the passed-in kwargs
kwargs.update(self._server_config.get_client_kwargs())
response = client.get(self.path('deploy_script'), **kwargs)
return _handle_response(response, self._server_config, synchronous) | [
"def",
"deploy_script",
"(",
"self",
",",
"synchronous",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"# shadow the passed-in kwargs",
"kwargs",
".",
"update",
"(",
"self",
".",
"_server_config",
".",
"get... | Helper for Config's deploy_script method.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message. | [
"Helper",
"for",
"Config",
"s",
"deploy_script",
"method",
"."
] | c36d8c20862e87bf6975bd48ac1ca40a9e634eaa | https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entities.py#L7809-L7824 | train | 208,679 |
Games-and-Simulations/sc-docker | scbw/docker_utils.py | ensure_local_net | def ensure_local_net(
network_name: str = DOCKER_STARCRAFT_NETWORK,
subnet_cidr: str = SUBNET_CIDR
) -> None:
"""
Create docker local net if not found.
:raises docker.errors.APIError
"""
logger.info(f"checking whether docker has network {network_name}")
ipam_pool = docker.types.IPAMPool(subnet=subnet_cidr)
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
networks = docker_client.networks.list(names=DOCKER_STARCRAFT_NETWORK)
output = networks[0].short_id if networks else None
if not output:
logger.info("network not found, creating ...")
output = docker_client.networks.create(DOCKER_STARCRAFT_NETWORK, ipam=ipam_config).short_id
logger.debug(f"docker network id: {output}") | python | def ensure_local_net(
network_name: str = DOCKER_STARCRAFT_NETWORK,
subnet_cidr: str = SUBNET_CIDR
) -> None:
"""
Create docker local net if not found.
:raises docker.errors.APIError
"""
logger.info(f"checking whether docker has network {network_name}")
ipam_pool = docker.types.IPAMPool(subnet=subnet_cidr)
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
networks = docker_client.networks.list(names=DOCKER_STARCRAFT_NETWORK)
output = networks[0].short_id if networks else None
if not output:
logger.info("network not found, creating ...")
output = docker_client.networks.create(DOCKER_STARCRAFT_NETWORK, ipam=ipam_config).short_id
logger.debug(f"docker network id: {output}") | [
"def",
"ensure_local_net",
"(",
"network_name",
":",
"str",
"=",
"DOCKER_STARCRAFT_NETWORK",
",",
"subnet_cidr",
":",
"str",
"=",
"SUBNET_CIDR",
")",
"->",
"None",
":",
"logger",
".",
"info",
"(",
"f\"checking whether docker has network {network_name}\"",
")",
"ipam_p... | Create docker local net if not found.
:raises docker.errors.APIError | [
"Create",
"docker",
"local",
"net",
"if",
"not",
"found",
"."
] | 1d7adb9b5839783655564afc4bbcd204a0055dcb | https://github.com/Games-and-Simulations/sc-docker/blob/1d7adb9b5839783655564afc4bbcd204a0055dcb/scbw/docker_utils.py#L73-L90 | train | 208,680 |
Games-and-Simulations/sc-docker | scbw/docker_utils.py | ensure_local_image | def ensure_local_image(
local_image: str,
parent_image: str = SC_PARENT_IMAGE,
java_image: str = SC_JAVA_IMAGE,
starcraft_base_dir: str = SCBW_BASE_DIR,
starcraft_binary_link: str = SC_BINARY_LINK,
) -> None:
"""
Check if `local_image` is present locally. If it is not, pull parent images and build.
This includes pulling starcraft binary.
:raises docker.errors.ImageNotFound
:raises docker.errors.APIError
"""
logger.info(f"checking if there is local image {local_image}")
docker_images = docker_client.images.list(local_image)
if len(docker_images) and docker_images[0].short_id is not None:
logger.info(f"image {local_image} found locally.")
return
logger.info("image not found locally, creating...")
pkg_docker_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "local_docker")
base_dir = os.path.join(starcraft_base_dir, "docker")
logger.info(f"copying files from {pkg_docker_dir} to {base_dir}.")
distutils.dir_util.copy_tree(pkg_docker_dir, base_dir)
starcraft_zip_file = f"{base_dir}/starcraft.zip"
if not os.path.exists(starcraft_zip_file):
logger.info(f"downloading starcraft.zip to {starcraft_zip_file}")
download_file(starcraft_binary_link, starcraft_zip_file)
logger.info(f"pulling image {parent_image}, this may take a while...")
pulled_image = docker_client.images.pull(parent_image)
pulled_image.tag(java_image)
logger.info(f"building local image {local_image}, this may take a while...")
docker_client.images.build(path=base_dir, dockerfile="game.dockerfile", tag=local_image)
logger.info(f"successfully built image {local_image}") | python | def ensure_local_image(
local_image: str,
parent_image: str = SC_PARENT_IMAGE,
java_image: str = SC_JAVA_IMAGE,
starcraft_base_dir: str = SCBW_BASE_DIR,
starcraft_binary_link: str = SC_BINARY_LINK,
) -> None:
"""
Check if `local_image` is present locally. If it is not, pull parent images and build.
This includes pulling starcraft binary.
:raises docker.errors.ImageNotFound
:raises docker.errors.APIError
"""
logger.info(f"checking if there is local image {local_image}")
docker_images = docker_client.images.list(local_image)
if len(docker_images) and docker_images[0].short_id is not None:
logger.info(f"image {local_image} found locally.")
return
logger.info("image not found locally, creating...")
pkg_docker_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "local_docker")
base_dir = os.path.join(starcraft_base_dir, "docker")
logger.info(f"copying files from {pkg_docker_dir} to {base_dir}.")
distutils.dir_util.copy_tree(pkg_docker_dir, base_dir)
starcraft_zip_file = f"{base_dir}/starcraft.zip"
if not os.path.exists(starcraft_zip_file):
logger.info(f"downloading starcraft.zip to {starcraft_zip_file}")
download_file(starcraft_binary_link, starcraft_zip_file)
logger.info(f"pulling image {parent_image}, this may take a while...")
pulled_image = docker_client.images.pull(parent_image)
pulled_image.tag(java_image)
logger.info(f"building local image {local_image}, this may take a while...")
docker_client.images.build(path=base_dir, dockerfile="game.dockerfile", tag=local_image)
logger.info(f"successfully built image {local_image}") | [
"def",
"ensure_local_image",
"(",
"local_image",
":",
"str",
",",
"parent_image",
":",
"str",
"=",
"SC_PARENT_IMAGE",
",",
"java_image",
":",
"str",
"=",
"SC_JAVA_IMAGE",
",",
"starcraft_base_dir",
":",
"str",
"=",
"SCBW_BASE_DIR",
",",
"starcraft_binary_link",
":... | Check if `local_image` is present locally. If it is not, pull parent images and build.
This includes pulling starcraft binary.
:raises docker.errors.ImageNotFound
:raises docker.errors.APIError | [
"Check",
"if",
"local_image",
"is",
"present",
"locally",
".",
"If",
"it",
"is",
"not",
"pull",
"parent",
"images",
"and",
"build",
".",
"This",
"includes",
"pulling",
"starcraft",
"binary",
"."
] | 1d7adb9b5839783655564afc4bbcd204a0055dcb | https://github.com/Games-and-Simulations/sc-docker/blob/1d7adb9b5839783655564afc4bbcd204a0055dcb/scbw/docker_utils.py#L93-L130 | train | 208,681 |
Games-and-Simulations/sc-docker | scbw/docker_utils.py | check_dockermachine | def check_dockermachine() -> bool:
"""
Checks that docker-machine is available on the computer
:raises FileNotFoundError if docker-machine is not present
"""
logger.debug("checking docker-machine presence")
# noinspection PyBroadException
try:
out = subprocess \
.check_output(["docker-machine", "version"]) \
.decode("utf-8") \
.replace("docker-machine.exe", "") \
.replace("docker-machine", "") \
.strip()
logger.debug(f"using docker machine version {out}")
return True
except Exception:
logger.debug(f"docker machine not present")
return False | python | def check_dockermachine() -> bool:
"""
Checks that docker-machine is available on the computer
:raises FileNotFoundError if docker-machine is not present
"""
logger.debug("checking docker-machine presence")
# noinspection PyBroadException
try:
out = subprocess \
.check_output(["docker-machine", "version"]) \
.decode("utf-8") \
.replace("docker-machine.exe", "") \
.replace("docker-machine", "") \
.strip()
logger.debug(f"using docker machine version {out}")
return True
except Exception:
logger.debug(f"docker machine not present")
return False | [
"def",
"check_dockermachine",
"(",
")",
"->",
"bool",
":",
"logger",
".",
"debug",
"(",
"\"checking docker-machine presence\"",
")",
"# noinspection PyBroadException",
"try",
":",
"out",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"\"docker-machine\"",
",",
"\... | Checks that docker-machine is available on the computer
:raises FileNotFoundError if docker-machine is not present | [
"Checks",
"that",
"docker",
"-",
"machine",
"is",
"available",
"on",
"the",
"computer"
] | 1d7adb9b5839783655564afc4bbcd204a0055dcb | https://github.com/Games-and-Simulations/sc-docker/blob/1d7adb9b5839783655564afc4bbcd204a0055dcb/scbw/docker_utils.py#L145-L164 | train | 208,682 |
Games-and-Simulations/sc-docker | scbw/docker_utils.py | dockermachine_ip | def dockermachine_ip() -> Optional[str]:
"""
Gets IP address of the default docker machine
Returns None if no docker-machine executable
in the PATH and if there no Docker machine
with name default present
"""
if not check_dockermachine():
return None
# noinspection PyBroadException
try:
out = subprocess.check_output(['docker-machine', 'ip'])
return out.decode("utf-8").strip()
except Exception:
logger.debug(f"docker machine not present")
return None | python | def dockermachine_ip() -> Optional[str]:
"""
Gets IP address of the default docker machine
Returns None if no docker-machine executable
in the PATH and if there no Docker machine
with name default present
"""
if not check_dockermachine():
return None
# noinspection PyBroadException
try:
out = subprocess.check_output(['docker-machine', 'ip'])
return out.decode("utf-8").strip()
except Exception:
logger.debug(f"docker machine not present")
return None | [
"def",
"dockermachine_ip",
"(",
")",
"->",
"Optional",
"[",
"str",
"]",
":",
"if",
"not",
"check_dockermachine",
"(",
")",
":",
"return",
"None",
"# noinspection PyBroadException",
"try",
":",
"out",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"'docker-m... | Gets IP address of the default docker machine
Returns None if no docker-machine executable
in the PATH and if there no Docker machine
with name default present | [
"Gets",
"IP",
"address",
"of",
"the",
"default",
"docker",
"machine",
"Returns",
"None",
"if",
"no",
"docker",
"-",
"machine",
"executable",
"in",
"the",
"PATH",
"and",
"if",
"there",
"no",
"Docker",
"machine",
"with",
"name",
"default",
"present"
] | 1d7adb9b5839783655564afc4bbcd204a0055dcb | https://github.com/Games-and-Simulations/sc-docker/blob/1d7adb9b5839783655564afc4bbcd204a0055dcb/scbw/docker_utils.py#L167-L183 | train | 208,683 |
Games-and-Simulations/sc-docker | scbw/docker_utils.py | xoscmounts | def xoscmounts(host_mount):
"""
Cross OS compatible mount dirs
"""
callback_lower_drive_letter = lambda pat: pat.group(1).lower()
host_mount = re.sub(r"^([a-zA-Z])\:", callback_lower_drive_letter, host_mount)
host_mount = re.sub(r"^([a-z])", "//\\1", host_mount)
host_mount = re.sub(r"\\", "/", host_mount)
return host_mount | python | def xoscmounts(host_mount):
"""
Cross OS compatible mount dirs
"""
callback_lower_drive_letter = lambda pat: pat.group(1).lower()
host_mount = re.sub(r"^([a-zA-Z])\:", callback_lower_drive_letter, host_mount)
host_mount = re.sub(r"^([a-z])", "//\\1", host_mount)
host_mount = re.sub(r"\\", "/", host_mount)
return host_mount | [
"def",
"xoscmounts",
"(",
"host_mount",
")",
":",
"callback_lower_drive_letter",
"=",
"lambda",
"pat",
":",
"pat",
".",
"group",
"(",
"1",
")",
".",
"lower",
"(",
")",
"host_mount",
"=",
"re",
".",
"sub",
"(",
"r\"^([a-zA-Z])\\:\"",
",",
"callback_lower_driv... | Cross OS compatible mount dirs | [
"Cross",
"OS",
"compatible",
"mount",
"dirs"
] | 1d7adb9b5839783655564afc4bbcd204a0055dcb | https://github.com/Games-and-Simulations/sc-docker/blob/1d7adb9b5839783655564afc4bbcd204a0055dcb/scbw/docker_utils.py#L186-L194 | train | 208,684 |
Rambatino/CHAID | CHAID/stats.py | chisquare | def chisquare(n_ij, weighted):
"""
Calculates the chisquare for a matrix of ind_v x dep_v
for the unweighted and SPSS weighted case
"""
if weighted:
m_ij = n_ij / n_ij
nan_mask = np.isnan(m_ij)
m_ij[nan_mask] = 0.000001 # otherwise it breaks the chi-squared test
w_ij = m_ij
n_ij_col_sum = n_ij.sum(axis=1)
n_ij_row_sum = n_ij.sum(axis=0)
alpha, beta, eps = (1, 1, 1)
while eps > 10e-6:
alpha = alpha * np.vstack(n_ij_col_sum / m_ij.sum(axis=1))
beta = n_ij_row_sum / (alpha * w_ij).sum(axis=0)
eps = np.max(np.absolute(w_ij * alpha * beta - m_ij))
m_ij = w_ij * alpha * beta
else:
m_ij = (np.vstack(n_ij.sum(axis=1)) * n_ij.sum(axis=0)) / n_ij.sum().astype(float)
dof = (n_ij.shape[0] - 1) * (n_ij.shape[1] - 1)
chi, p_val = stats.chisquare(n_ij, f_exp=m_ij, ddof=n_ij.size - 1 - dof, axis=None)
return (chi, p_val, dof) | python | def chisquare(n_ij, weighted):
"""
Calculates the chisquare for a matrix of ind_v x dep_v
for the unweighted and SPSS weighted case
"""
if weighted:
m_ij = n_ij / n_ij
nan_mask = np.isnan(m_ij)
m_ij[nan_mask] = 0.000001 # otherwise it breaks the chi-squared test
w_ij = m_ij
n_ij_col_sum = n_ij.sum(axis=1)
n_ij_row_sum = n_ij.sum(axis=0)
alpha, beta, eps = (1, 1, 1)
while eps > 10e-6:
alpha = alpha * np.vstack(n_ij_col_sum / m_ij.sum(axis=1))
beta = n_ij_row_sum / (alpha * w_ij).sum(axis=0)
eps = np.max(np.absolute(w_ij * alpha * beta - m_ij))
m_ij = w_ij * alpha * beta
else:
m_ij = (np.vstack(n_ij.sum(axis=1)) * n_ij.sum(axis=0)) / n_ij.sum().astype(float)
dof = (n_ij.shape[0] - 1) * (n_ij.shape[1] - 1)
chi, p_val = stats.chisquare(n_ij, f_exp=m_ij, ddof=n_ij.size - 1 - dof, axis=None)
return (chi, p_val, dof) | [
"def",
"chisquare",
"(",
"n_ij",
",",
"weighted",
")",
":",
"if",
"weighted",
":",
"m_ij",
"=",
"n_ij",
"/",
"n_ij",
"nan_mask",
"=",
"np",
".",
"isnan",
"(",
"m_ij",
")",
"m_ij",
"[",
"nan_mask",
"]",
"=",
"0.000001",
"# otherwise it breaks the chi-square... | Calculates the chisquare for a matrix of ind_v x dep_v
for the unweighted and SPSS weighted case | [
"Calculates",
"the",
"chisquare",
"for",
"a",
"matrix",
"of",
"ind_v",
"x",
"dep_v",
"for",
"the",
"unweighted",
"and",
"SPSS",
"weighted",
"case"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/stats.py#L9-L36 | train | 208,685 |
Rambatino/CHAID | CHAID/stats.py | Stats.best_split | def best_split(self, ind, dep):
""" determine which splitting function to apply """
if isinstance(dep, ContinuousColumn):
return self.best_con_split(ind, dep)
else:
return self.best_cat_heuristic_split(ind, dep) | python | def best_split(self, ind, dep):
""" determine which splitting function to apply """
if isinstance(dep, ContinuousColumn):
return self.best_con_split(ind, dep)
else:
return self.best_cat_heuristic_split(ind, dep) | [
"def",
"best_split",
"(",
"self",
",",
"ind",
",",
"dep",
")",
":",
"if",
"isinstance",
"(",
"dep",
",",
"ContinuousColumn",
")",
":",
"return",
"self",
".",
"best_con_split",
"(",
"ind",
",",
"dep",
")",
"else",
":",
"return",
"self",
".",
"best_cat_h... | determine which splitting function to apply | [
"determine",
"which",
"splitting",
"function",
"to",
"apply"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/stats.py#L49-L54 | train | 208,686 |
Rambatino/CHAID | CHAID/tree.py | Tree.from_numpy | def from_numpy(ndarr, arr, alpha_merge=0.05, max_depth=2, min_parent_node_size=30,
min_child_node_size=30, split_titles=None, split_threshold=0, weights=None,
variable_types=None, dep_variable_type='categorical'):
"""
Create a CHAID object from numpy
Parameters
----------
ndarr : numpy.ndarray
non-aggregated 2-dimensional array containing
independent variables on the veritcal axis and (usually)
respondent level data on the horizontal axis
arr : numpy.ndarray
1-dimensional array of the dependent variable associated with
ndarr
alpha_merge : float
the threshold value in which to create a split (default 0.05)
max_depth : float
the threshold value for the maximum number of levels after the root
node in the tree (default 2)
min_parent_node_size : float
the threshold value of the number of respondents that the node must
contain (default 30)
split_titles : array-like
array of names for the independent variables in the data
variable_types : array-like or dict
array of variable types, or dict of column names to variable types.
Supported variable types are the strings 'nominal' or 'ordinal' in
lower case
"""
vectorised_array = []
variable_types = variable_types or ['nominal'] * ndarr.shape[1]
for ind, col_type in enumerate(variable_types):
title = None
if split_titles is not None: title = split_titles[ind]
if col_type == 'ordinal':
col = OrdinalColumn(ndarr[:, ind], name=title)
elif col_type == 'nominal':
col = NominalColumn(ndarr[:, ind], name=title)
else:
raise NotImplementedError('Unknown independent variable type ' + col_type)
vectorised_array.append(col)
if dep_variable_type == 'categorical':
observed = NominalColumn(arr, weights=weights)
elif dep_variable_type == 'continuous':
observed = ContinuousColumn(arr, weights=weights)
else:
raise NotImplementedError('Unknown dependent variable type ' + dep_variable_type)
config = { 'alpha_merge': alpha_merge, 'max_depth': max_depth, 'min_parent_node_size': min_parent_node_size,
'min_child_node_size': min_child_node_size, 'split_threshold': split_threshold }
return Tree(vectorised_array, observed, config) | python | def from_numpy(ndarr, arr, alpha_merge=0.05, max_depth=2, min_parent_node_size=30,
min_child_node_size=30, split_titles=None, split_threshold=0, weights=None,
variable_types=None, dep_variable_type='categorical'):
"""
Create a CHAID object from numpy
Parameters
----------
ndarr : numpy.ndarray
non-aggregated 2-dimensional array containing
independent variables on the veritcal axis and (usually)
respondent level data on the horizontal axis
arr : numpy.ndarray
1-dimensional array of the dependent variable associated with
ndarr
alpha_merge : float
the threshold value in which to create a split (default 0.05)
max_depth : float
the threshold value for the maximum number of levels after the root
node in the tree (default 2)
min_parent_node_size : float
the threshold value of the number of respondents that the node must
contain (default 30)
split_titles : array-like
array of names for the independent variables in the data
variable_types : array-like or dict
array of variable types, or dict of column names to variable types.
Supported variable types are the strings 'nominal' or 'ordinal' in
lower case
"""
vectorised_array = []
variable_types = variable_types or ['nominal'] * ndarr.shape[1]
for ind, col_type in enumerate(variable_types):
title = None
if split_titles is not None: title = split_titles[ind]
if col_type == 'ordinal':
col = OrdinalColumn(ndarr[:, ind], name=title)
elif col_type == 'nominal':
col = NominalColumn(ndarr[:, ind], name=title)
else:
raise NotImplementedError('Unknown independent variable type ' + col_type)
vectorised_array.append(col)
if dep_variable_type == 'categorical':
observed = NominalColumn(arr, weights=weights)
elif dep_variable_type == 'continuous':
observed = ContinuousColumn(arr, weights=weights)
else:
raise NotImplementedError('Unknown dependent variable type ' + dep_variable_type)
config = { 'alpha_merge': alpha_merge, 'max_depth': max_depth, 'min_parent_node_size': min_parent_node_size,
'min_child_node_size': min_child_node_size, 'split_threshold': split_threshold }
return Tree(vectorised_array, observed, config) | [
"def",
"from_numpy",
"(",
"ndarr",
",",
"arr",
",",
"alpha_merge",
"=",
"0.05",
",",
"max_depth",
"=",
"2",
",",
"min_parent_node_size",
"=",
"30",
",",
"min_child_node_size",
"=",
"30",
",",
"split_titles",
"=",
"None",
",",
"split_threshold",
"=",
"0",
"... | Create a CHAID object from numpy
Parameters
----------
ndarr : numpy.ndarray
non-aggregated 2-dimensional array containing
independent variables on the veritcal axis and (usually)
respondent level data on the horizontal axis
arr : numpy.ndarray
1-dimensional array of the dependent variable associated with
ndarr
alpha_merge : float
the threshold value in which to create a split (default 0.05)
max_depth : float
the threshold value for the maximum number of levels after the root
node in the tree (default 2)
min_parent_node_size : float
the threshold value of the number of respondents that the node must
contain (default 30)
split_titles : array-like
array of names for the independent variables in the data
variable_types : array-like or dict
array of variable types, or dict of column names to variable types.
Supported variable types are the strings 'nominal' or 'ordinal' in
lower case | [
"Create",
"a",
"CHAID",
"object",
"from",
"numpy"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/tree.py#L45-L96 | train | 208,687 |
Rambatino/CHAID | CHAID/tree.py | Tree.build_tree | def build_tree(self):
""" Build chaid tree """
self._tree_store = []
self.node(np.arange(0, self.data_size, dtype=np.int), self.vectorised_array, self.observed) | python | def build_tree(self):
""" Build chaid tree """
self._tree_store = []
self.node(np.arange(0, self.data_size, dtype=np.int), self.vectorised_array, self.observed) | [
"def",
"build_tree",
"(",
"self",
")",
":",
"self",
".",
"_tree_store",
"=",
"[",
"]",
"self",
".",
"node",
"(",
"np",
".",
"arange",
"(",
"0",
",",
"self",
".",
"data_size",
",",
"dtype",
"=",
"np",
".",
"int",
")",
",",
"self",
".",
"vectorised... | Build chaid tree | [
"Build",
"chaid",
"tree"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/tree.py#L98-L101 | train | 208,688 |
Rambatino/CHAID | CHAID/tree.py | Tree.from_pandas_df | def from_pandas_df(df, i_variables, d_variable, alpha_merge=0.05, max_depth=2,
min_parent_node_size=30, min_child_node_size=30, split_threshold=0,
weight=None, dep_variable_type='categorical'):
"""
Helper method to pre-process a pandas data frame in order to run CHAID
analysis
Parameters
----------
df : pandas.DataFrame
the dataframe with the dependent and independent variables in which
to slice from
i_variables : dict
dict of instance variable names with their variable types. Supported
variable types are the strings 'nominal' or 'ordinal' in lower case
d_variable : string
the name of the dependent variable in the dataframe
alpha_merge : float
the threshold value in which to create a split (default 0.05)
max_depth : float
the threshold value for the maximum number of levels after the root
node in the tree (default 2)
split_threshold : float
the variation in chi-score such that surrogate splits are created
(default 0)
min_parent_node_size : float
the threshold value of the number of respondents that the node must
contain (default 30)
min_child_node_size : float
the threshold value of the number of respondents that each child node must
contain (default 30)
weight : array-like
the respondent weights. If passed, weighted chi-square calculation is run
dep_variable_type : str
the type of dependent variable. Supported variable types are 'categorical' or
'continuous'
"""
ind_df = df[list(i_variables.keys())]
ind_values = ind_df.values
dep_values = df[d_variable].values
weights = df[weight] if weight is not None else None
return Tree.from_numpy(ind_values, dep_values, alpha_merge, max_depth, min_parent_node_size,
min_child_node_size, list(ind_df.columns.values), split_threshold, weights,
list(i_variables.values()), dep_variable_type) | python | def from_pandas_df(df, i_variables, d_variable, alpha_merge=0.05, max_depth=2,
min_parent_node_size=30, min_child_node_size=30, split_threshold=0,
weight=None, dep_variable_type='categorical'):
"""
Helper method to pre-process a pandas data frame in order to run CHAID
analysis
Parameters
----------
df : pandas.DataFrame
the dataframe with the dependent and independent variables in which
to slice from
i_variables : dict
dict of instance variable names with their variable types. Supported
variable types are the strings 'nominal' or 'ordinal' in lower case
d_variable : string
the name of the dependent variable in the dataframe
alpha_merge : float
the threshold value in which to create a split (default 0.05)
max_depth : float
the threshold value for the maximum number of levels after the root
node in the tree (default 2)
split_threshold : float
the variation in chi-score such that surrogate splits are created
(default 0)
min_parent_node_size : float
the threshold value of the number of respondents that the node must
contain (default 30)
min_child_node_size : float
the threshold value of the number of respondents that each child node must
contain (default 30)
weight : array-like
the respondent weights. If passed, weighted chi-square calculation is run
dep_variable_type : str
the type of dependent variable. Supported variable types are 'categorical' or
'continuous'
"""
ind_df = df[list(i_variables.keys())]
ind_values = ind_df.values
dep_values = df[d_variable].values
weights = df[weight] if weight is not None else None
return Tree.from_numpy(ind_values, dep_values, alpha_merge, max_depth, min_parent_node_size,
min_child_node_size, list(ind_df.columns.values), split_threshold, weights,
list(i_variables.values()), dep_variable_type) | [
"def",
"from_pandas_df",
"(",
"df",
",",
"i_variables",
",",
"d_variable",
",",
"alpha_merge",
"=",
"0.05",
",",
"max_depth",
"=",
"2",
",",
"min_parent_node_size",
"=",
"30",
",",
"min_child_node_size",
"=",
"30",
",",
"split_threshold",
"=",
"0",
",",
"wei... | Helper method to pre-process a pandas data frame in order to run CHAID
analysis
Parameters
----------
df : pandas.DataFrame
the dataframe with the dependent and independent variables in which
to slice from
i_variables : dict
dict of instance variable names with their variable types. Supported
variable types are the strings 'nominal' or 'ordinal' in lower case
d_variable : string
the name of the dependent variable in the dataframe
alpha_merge : float
the threshold value in which to create a split (default 0.05)
max_depth : float
the threshold value for the maximum number of levels after the root
node in the tree (default 2)
split_threshold : float
the variation in chi-score such that surrogate splits are created
(default 0)
min_parent_node_size : float
the threshold value of the number of respondents that the node must
contain (default 30)
min_child_node_size : float
the threshold value of the number of respondents that each child node must
contain (default 30)
weight : array-like
the respondent weights. If passed, weighted chi-square calculation is run
dep_variable_type : str
the type of dependent variable. Supported variable types are 'categorical' or
'continuous' | [
"Helper",
"method",
"to",
"pre",
"-",
"process",
"a",
"pandas",
"data",
"frame",
"in",
"order",
"to",
"run",
"CHAID",
"analysis"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/tree.py#L110-L153 | train | 208,689 |
Rambatino/CHAID | CHAID/tree.py | Tree.node | def node(self, rows, ind, dep, depth=0, parent=None, parent_decisions=None):
""" internal method to create a node in the tree """
depth += 1
if self.max_depth < depth:
terminal_node = Node(choices=parent_decisions, node_id=self.node_count,
parent=parent, indices=rows, dep_v=dep)
self._tree_store.append(terminal_node)
self.node_count += 1
terminal_node.split.invalid_reason = InvalidSplitReason.MAX_DEPTH
return self._tree_store
split = self._stats.best_split(ind, dep)
node = Node(choices=parent_decisions, node_id=self.node_count, indices=rows, dep_v=dep,
parent=parent, split=split)
self._tree_store.append(node)
parent = self.node_count
self.node_count += 1
if not split.valid():
return self._tree_store
for index, choices in enumerate(split.splits):
correct_rows = np.in1d(ind[split.column_id].arr, choices)
dep_slice = dep[correct_rows]
ind_slice = [vect[correct_rows] for vect in ind]
row_slice = rows[correct_rows]
if self.min_parent_node_size < len(dep_slice.arr):
self.node(row_slice, ind_slice, dep_slice, depth=depth, parent=parent,
parent_decisions=split.split_map[index])
else:
terminal_node = Node(choices=split.split_map[index], node_id=self.node_count,
parent=parent, indices=row_slice, dep_v=dep_slice)
terminal_node.split.invalid_reason = InvalidSplitReason.MIN_PARENT_NODE_SIZE
self._tree_store.append(terminal_node)
self.node_count += 1
return self._tree_store | python | def node(self, rows, ind, dep, depth=0, parent=None, parent_decisions=None):
""" internal method to create a node in the tree """
depth += 1
if self.max_depth < depth:
terminal_node = Node(choices=parent_decisions, node_id=self.node_count,
parent=parent, indices=rows, dep_v=dep)
self._tree_store.append(terminal_node)
self.node_count += 1
terminal_node.split.invalid_reason = InvalidSplitReason.MAX_DEPTH
return self._tree_store
split = self._stats.best_split(ind, dep)
node = Node(choices=parent_decisions, node_id=self.node_count, indices=rows, dep_v=dep,
parent=parent, split=split)
self._tree_store.append(node)
parent = self.node_count
self.node_count += 1
if not split.valid():
return self._tree_store
for index, choices in enumerate(split.splits):
correct_rows = np.in1d(ind[split.column_id].arr, choices)
dep_slice = dep[correct_rows]
ind_slice = [vect[correct_rows] for vect in ind]
row_slice = rows[correct_rows]
if self.min_parent_node_size < len(dep_slice.arr):
self.node(row_slice, ind_slice, dep_slice, depth=depth, parent=parent,
parent_decisions=split.split_map[index])
else:
terminal_node = Node(choices=split.split_map[index], node_id=self.node_count,
parent=parent, indices=row_slice, dep_v=dep_slice)
terminal_node.split.invalid_reason = InvalidSplitReason.MIN_PARENT_NODE_SIZE
self._tree_store.append(terminal_node)
self.node_count += 1
return self._tree_store | [
"def",
"node",
"(",
"self",
",",
"rows",
",",
"ind",
",",
"dep",
",",
"depth",
"=",
"0",
",",
"parent",
"=",
"None",
",",
"parent_decisions",
"=",
"None",
")",
":",
"depth",
"+=",
"1",
"if",
"self",
".",
"max_depth",
"<",
"depth",
":",
"terminal_no... | internal method to create a node in the tree | [
"internal",
"method",
"to",
"create",
"a",
"node",
"in",
"the",
"tree"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/tree.py#L155-L193 | train | 208,690 |
Rambatino/CHAID | CHAID/tree.py | Tree.to_tree | def to_tree(self):
""" returns a TreeLib tree """
tree = TreeLibTree()
for node in self:
tree.create_node(node, node.node_id, parent=node.parent)
return tree | python | def to_tree(self):
""" returns a TreeLib tree """
tree = TreeLibTree()
for node in self:
tree.create_node(node, node.node_id, parent=node.parent)
return tree | [
"def",
"to_tree",
"(",
"self",
")",
":",
"tree",
"=",
"TreeLibTree",
"(",
")",
"for",
"node",
"in",
"self",
":",
"tree",
".",
"create_node",
"(",
"node",
",",
"node",
".",
"node_id",
",",
"parent",
"=",
"node",
".",
"parent",
")",
"return",
"tree"
] | returns a TreeLib tree | [
"returns",
"a",
"TreeLib",
"tree"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/tree.py#L199-L204 | train | 208,691 |
Rambatino/CHAID | CHAID/tree.py | Tree.node_predictions | def node_predictions(self):
""" Determines which rows fall into which node """
pred = np.zeros(self.data_size)
for node in self:
if node.is_terminal:
pred[node.indices] = node.node_id
return pred | python | def node_predictions(self):
""" Determines which rows fall into which node """
pred = np.zeros(self.data_size)
for node in self:
if node.is_terminal:
pred[node.indices] = node.node_id
return pred | [
"def",
"node_predictions",
"(",
"self",
")",
":",
"pred",
"=",
"np",
".",
"zeros",
"(",
"self",
".",
"data_size",
")",
"for",
"node",
"in",
"self",
":",
"if",
"node",
".",
"is_terminal",
":",
"pred",
"[",
"node",
".",
"indices",
"]",
"=",
"node",
"... | Determines which rows fall into which node | [
"Determines",
"which",
"rows",
"fall",
"into",
"which",
"node"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/tree.py#L227-L233 | train | 208,692 |
Rambatino/CHAID | CHAID/tree.py | Tree.model_predictions | def model_predictions(self):
"""
Determines the highest frequency of
categorical dependent variable in the
terminal node where that row fell
"""
if isinstance(self.observed, ContinuousColumn):
return ValueError("Cannot make model predictions on a continuous scale")
pred = np.zeros(self.data_size).astype('object')
for node in self:
if node.is_terminal:
pred[node.indices] = max(node.members, key=node.members.get)
return pred | python | def model_predictions(self):
"""
Determines the highest frequency of
categorical dependent variable in the
terminal node where that row fell
"""
if isinstance(self.observed, ContinuousColumn):
return ValueError("Cannot make model predictions on a continuous scale")
pred = np.zeros(self.data_size).astype('object')
for node in self:
if node.is_terminal:
pred[node.indices] = max(node.members, key=node.members.get)
return pred | [
"def",
"model_predictions",
"(",
"self",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"observed",
",",
"ContinuousColumn",
")",
":",
"return",
"ValueError",
"(",
"\"Cannot make model predictions on a continuous scale\"",
")",
"pred",
"=",
"np",
".",
"zeros",
"(... | Determines the highest frequency of
categorical dependent variable in the
terminal node where that row fell | [
"Determines",
"the",
"highest",
"frequency",
"of",
"categorical",
"dependent",
"variable",
"in",
"the",
"terminal",
"node",
"where",
"that",
"row",
"fell"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/tree.py#L260-L272 | train | 208,693 |
Rambatino/CHAID | CHAID/column.py | Column.bell_set | def bell_set(self, collection, ordinal=False):
"""
Calculates the Bell set
"""
if len(collection) == 1:
yield [ collection ]
return
first = collection[0]
for smaller in self.bell_set(collection[1:]):
for n, subset in enumerate(smaller):
if not ordinal or (ordinal and is_sorted(smaller[:n] + [[ first ] + subset] + smaller[n+1:], self._nan)):
yield smaller[:n] + [[ first ] + subset] + smaller[n+1:]
if not ordinal or (ordinal and is_sorted([ [ first ] ] + smaller, self._nan)):
yield [ [ first ] ] + smaller | python | def bell_set(self, collection, ordinal=False):
"""
Calculates the Bell set
"""
if len(collection) == 1:
yield [ collection ]
return
first = collection[0]
for smaller in self.bell_set(collection[1:]):
for n, subset in enumerate(smaller):
if not ordinal or (ordinal and is_sorted(smaller[:n] + [[ first ] + subset] + smaller[n+1:], self._nan)):
yield smaller[:n] + [[ first ] + subset] + smaller[n+1:]
if not ordinal or (ordinal and is_sorted([ [ first ] ] + smaller, self._nan)):
yield [ [ first ] ] + smaller | [
"def",
"bell_set",
"(",
"self",
",",
"collection",
",",
"ordinal",
"=",
"False",
")",
":",
"if",
"len",
"(",
"collection",
")",
"==",
"1",
":",
"yield",
"[",
"collection",
"]",
"return",
"first",
"=",
"collection",
"[",
"0",
"]",
"for",
"smaller",
"i... | Calculates the Bell set | [
"Calculates",
"the",
"Bell",
"set"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/column.py#L64-L79 | train | 208,694 |
Rambatino/CHAID | CHAID/column.py | NominalColumn.substitute_values | def substitute_values(self, vect):
"""
Internal method to substitute integers into the vector, and construct
metadata to convert back to the original vector.
np.nan is always given -1, all other objects are given integers in
order of apperence.
Parameters
----------
vect : np.array
the vector in which to substitute values in
"""
try:
unique = np.unique(vect)
except:
unique = set(vect)
unique = [
x for x in unique if not isinstance(x, float) or not isnan(x)
]
arr = np.copy(vect)
for new_id, value in enumerate(unique):
np.place(arr, arr==value, new_id)
self.metadata[new_id] = value
arr = arr.astype(np.float)
np.place(arr, np.isnan(arr), -1)
self.arr = arr
if -1 in arr:
self.metadata[-1] = self._missing_id | python | def substitute_values(self, vect):
"""
Internal method to substitute integers into the vector, and construct
metadata to convert back to the original vector.
np.nan is always given -1, all other objects are given integers in
order of apperence.
Parameters
----------
vect : np.array
the vector in which to substitute values in
"""
try:
unique = np.unique(vect)
except:
unique = set(vect)
unique = [
x for x in unique if not isinstance(x, float) or not isnan(x)
]
arr = np.copy(vect)
for new_id, value in enumerate(unique):
np.place(arr, arr==value, new_id)
self.metadata[new_id] = value
arr = arr.astype(np.float)
np.place(arr, np.isnan(arr), -1)
self.arr = arr
if -1 in arr:
self.metadata[-1] = self._missing_id | [
"def",
"substitute_values",
"(",
"self",
",",
"vect",
")",
":",
"try",
":",
"unique",
"=",
"np",
".",
"unique",
"(",
"vect",
")",
"except",
":",
"unique",
"=",
"set",
"(",
"vect",
")",
"unique",
"=",
"[",
"x",
"for",
"x",
"in",
"unique",
"if",
"n... | Internal method to substitute integers into the vector, and construct
metadata to convert back to the original vector.
np.nan is always given -1, all other objects are given integers in
order of apperence.
Parameters
----------
vect : np.array
the vector in which to substitute values in | [
"Internal",
"method",
"to",
"substitute",
"integers",
"into",
"the",
"vector",
"and",
"construct",
"metadata",
"to",
"convert",
"back",
"to",
"the",
"original",
"vector",
"."
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/column.py#L104-L136 | train | 208,695 |
Rambatino/CHAID | CHAID/split.py | Split.sub_split_values | def sub_split_values(self, sub):
""" Substitutes the splits with other values into the split_map """
for i, arr in enumerate(self.splits):
self.split_map[i] = [sub.get(x, x) for x in arr]
for split in self.surrogates:
split.sub_split_values(sub) | python | def sub_split_values(self, sub):
""" Substitutes the splits with other values into the split_map """
for i, arr in enumerate(self.splits):
self.split_map[i] = [sub.get(x, x) for x in arr]
for split in self.surrogates:
split.sub_split_values(sub) | [
"def",
"sub_split_values",
"(",
"self",
",",
"sub",
")",
":",
"for",
"i",
",",
"arr",
"in",
"enumerate",
"(",
"self",
".",
"splits",
")",
":",
"self",
".",
"split_map",
"[",
"i",
"]",
"=",
"[",
"sub",
".",
"get",
"(",
"x",
",",
"x",
")",
"for",... | Substitutes the splits with other values into the split_map | [
"Substitutes",
"the",
"splits",
"with",
"other",
"values",
"into",
"the",
"split_map"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/split.py#L34-L39 | train | 208,696 |
Rambatino/CHAID | CHAID/split.py | Split.name_columns | def name_columns(self, sub):
""" Substitutes the split column index with a human readable string """
if self.column_id is not None and len(sub) > self.column_id:
self.split_name = sub[self.column_id]
for split in self.surrogates:
split.name_columns(sub) | python | def name_columns(self, sub):
""" Substitutes the split column index with a human readable string """
if self.column_id is not None and len(sub) > self.column_id:
self.split_name = sub[self.column_id]
for split in self.surrogates:
split.name_columns(sub) | [
"def",
"name_columns",
"(",
"self",
",",
"sub",
")",
":",
"if",
"self",
".",
"column_id",
"is",
"not",
"None",
"and",
"len",
"(",
"sub",
")",
">",
"self",
".",
"column_id",
":",
"self",
".",
"split_name",
"=",
"sub",
"[",
"self",
".",
"column_id",
... | Substitutes the split column index with a human readable string | [
"Substitutes",
"the",
"split",
"column",
"index",
"with",
"a",
"human",
"readable",
"string"
] | dc19e41ebdf2773168733efdf0d7579950c8d2e7 | https://github.com/Rambatino/CHAID/blob/dc19e41ebdf2773168733efdf0d7579950c8d2e7/CHAID/split.py#L41-L46 | train | 208,697 |
jilljenn/tryalgo | tryalgo/three_partition.py | three_partition | def three_partition(x):
"""partition a set of integers in 3 parts of same total value
:param x: table of non negative values
:returns: triplet of the integers encoding the sets, or None otherwise
:complexity: :math:`O(2^{2n})`
"""
f = [0] * (1 << len(x))
for i in range(len(x)):
for S in range(1 << i):
f[S | (1 << i)] = f[S] + x[i]
for A in range(1 << len(x)):
for B in range(1 << len(x)):
if A & B == 0 and f[A] == f[B] and 3 * f[A] == f[-1]:
return (A, B, ((1 << len(x)) - 1) ^ A ^ B)
return None | python | def three_partition(x):
"""partition a set of integers in 3 parts of same total value
:param x: table of non negative values
:returns: triplet of the integers encoding the sets, or None otherwise
:complexity: :math:`O(2^{2n})`
"""
f = [0] * (1 << len(x))
for i in range(len(x)):
for S in range(1 << i):
f[S | (1 << i)] = f[S] + x[i]
for A in range(1 << len(x)):
for B in range(1 << len(x)):
if A & B == 0 and f[A] == f[B] and 3 * f[A] == f[-1]:
return (A, B, ((1 << len(x)) - 1) ^ A ^ B)
return None | [
"def",
"three_partition",
"(",
"x",
")",
":",
"f",
"=",
"[",
"0",
"]",
"*",
"(",
"1",
"<<",
"len",
"(",
"x",
")",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"x",
")",
")",
":",
"for",
"S",
"in",
"range",
"(",
"1",
"<<",
"i",
")",
... | partition a set of integers in 3 parts of same total value
:param x: table of non negative values
:returns: triplet of the integers encoding the sets, or None otherwise
:complexity: :math:`O(2^{2n})` | [
"partition",
"a",
"set",
"of",
"integers",
"in",
"3",
"parts",
"of",
"same",
"total",
"value"
] | 89a4dd9655e7b6b0a176f72b4c60d0196420dfe1 | https://github.com/jilljenn/tryalgo/blob/89a4dd9655e7b6b0a176f72b4c60d0196420dfe1/tryalgo/three_partition.py#L8-L23 | train | 208,698 |
jilljenn/tryalgo | tryalgo/freivalds.py | freivalds | def freivalds(A, B, C):
"""Tests matrix product AB=C by Freivalds
:param A: n by n numerical matrix
:param B: same
:param C: same
:returns: False with high probability if AB != C
:complexity:
:math:`O(n^2)`
"""
n = len(A)
x = [randint(0, 1000000) for j in range(n)]
return mult(A, mult(B, x)) == mult(C, x) | python | def freivalds(A, B, C):
"""Tests matrix product AB=C by Freivalds
:param A: n by n numerical matrix
:param B: same
:param C: same
:returns: False with high probability if AB != C
:complexity:
:math:`O(n^2)`
"""
n = len(A)
x = [randint(0, 1000000) for j in range(n)]
return mult(A, mult(B, x)) == mult(C, x) | [
"def",
"freivalds",
"(",
"A",
",",
"B",
",",
"C",
")",
":",
"n",
"=",
"len",
"(",
"A",
")",
"x",
"=",
"[",
"randint",
"(",
"0",
",",
"1000000",
")",
"for",
"j",
"in",
"range",
"(",
"n",
")",
"]",
"return",
"mult",
"(",
"A",
",",
"mult",
"... | Tests matrix product AB=C by Freivalds
:param A: n by n numerical matrix
:param B: same
:param C: same
:returns: False with high probability if AB != C
:complexity:
:math:`O(n^2)` | [
"Tests",
"matrix",
"product",
"AB",
"=",
"C",
"by",
"Freivalds"
] | 89a4dd9655e7b6b0a176f72b4c60d0196420dfe1 | https://github.com/jilljenn/tryalgo/blob/89a4dd9655e7b6b0a176f72b4c60d0196420dfe1/tryalgo/freivalds.py#L36-L49 | train | 208,699 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.