repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
StellarCN/py-stellar-base | stellar_base/operation.py | Operation.to_xdr_amount | def to_xdr_amount(value):
"""Converts an amount to the appropriate value to send over the network
as a part of an XDR object.
Each asset amount is encoded as a signed 64-bit integer in the XDR
structures. An asset amount unit (that which is seen by end users) is
scaled down by a factor of ten million (10,000,000) to arrive at the
native 64-bit integer representation. For example, the integer amount
value 25,123,456 equals 2.5123456 units of the asset. This scaling
allows for seven decimal places of precision in human-friendly amount
units.
This static method correctly multiplies the value by the scaling factor
in order to come to the integer value used in XDR structures.
See `Stellar's documentation on Asset Precision
<https://www.stellar.org/developers/guides/concepts/assets.html#amount-precision-and-representation>`_
for more information.
:param str value: The amount to convert to an integer for XDR
serialization.
"""
if not isinstance(value, str):
raise NotValidParamError("Value of type '{}' must be of type String, but got {}".format(value, type(value)))
# throw exception if value * ONE has decimal places (it can't be
# represented as int64)
try:
amount = int((Decimal(value) * ONE).to_integral_exact(context=Context(traps=[Inexact])))
except decimal.Inexact:
raise NotValidParamError("Value of '{}' must have at most 7 digits after the decimal.".format(value))
except decimal.InvalidOperation:
raise NotValidParamError("Value of '{}' must represent a positive number.".format(value))
return amount | python | def to_xdr_amount(value):
"""Converts an amount to the appropriate value to send over the network
as a part of an XDR object.
Each asset amount is encoded as a signed 64-bit integer in the XDR
structures. An asset amount unit (that which is seen by end users) is
scaled down by a factor of ten million (10,000,000) to arrive at the
native 64-bit integer representation. For example, the integer amount
value 25,123,456 equals 2.5123456 units of the asset. This scaling
allows for seven decimal places of precision in human-friendly amount
units.
This static method correctly multiplies the value by the scaling factor
in order to come to the integer value used in XDR structures.
See `Stellar's documentation on Asset Precision
<https://www.stellar.org/developers/guides/concepts/assets.html#amount-precision-and-representation>`_
for more information.
:param str value: The amount to convert to an integer for XDR
serialization.
"""
if not isinstance(value, str):
raise NotValidParamError("Value of type '{}' must be of type String, but got {}".format(value, type(value)))
# throw exception if value * ONE has decimal places (it can't be
# represented as int64)
try:
amount = int((Decimal(value) * ONE).to_integral_exact(context=Context(traps=[Inexact])))
except decimal.Inexact:
raise NotValidParamError("Value of '{}' must have at most 7 digits after the decimal.".format(value))
except decimal.InvalidOperation:
raise NotValidParamError("Value of '{}' must represent a positive number.".format(value))
return amount | [
"def",
"to_xdr_amount",
"(",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"raise",
"NotValidParamError",
"(",
"\"Value of type '{}' must be of type String, but got {}\"",
".",
"format",
"(",
"value",
",",
"type",
"(",
"value",
... | Converts an amount to the appropriate value to send over the network
as a part of an XDR object.
Each asset amount is encoded as a signed 64-bit integer in the XDR
structures. An asset amount unit (that which is seen by end users) is
scaled down by a factor of ten million (10,000,000) to arrive at the
native 64-bit integer representation. For example, the integer amount
value 25,123,456 equals 2.5123456 units of the asset. This scaling
allows for seven decimal places of precision in human-friendly amount
units.
This static method correctly multiplies the value by the scaling factor
in order to come to the integer value used in XDR structures.
See `Stellar's documentation on Asset Precision
<https://www.stellar.org/developers/guides/concepts/assets.html#amount-precision-and-representation>`_
for more information.
:param str value: The amount to convert to an integer for XDR
serialization. | [
"Converts",
"an",
"amount",
"to",
"the",
"appropriate",
"value",
"to",
"send",
"over",
"the",
"network",
"as",
"a",
"part",
"of",
"an",
"XDR",
"object",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/operation.py#L71-L105 | train | 229,300 |
StellarCN/py-stellar-base | stellar_base/memo.py | TextMemo.to_xdr_object | def to_xdr_object(self):
"""Creates an XDR Memo object for a transaction with MEMO_TEXT."""
return Xdr.types.Memo(type=Xdr.const.MEMO_TEXT, text=self.text) | python | def to_xdr_object(self):
"""Creates an XDR Memo object for a transaction with MEMO_TEXT."""
return Xdr.types.Memo(type=Xdr.const.MEMO_TEXT, text=self.text) | [
"def",
"to_xdr_object",
"(",
"self",
")",
":",
"return",
"Xdr",
".",
"types",
".",
"Memo",
"(",
"type",
"=",
"Xdr",
".",
"const",
".",
"MEMO_TEXT",
",",
"text",
"=",
"self",
".",
"text",
")"
] | Creates an XDR Memo object for a transaction with MEMO_TEXT. | [
"Creates",
"an",
"XDR",
"Memo",
"object",
"for",
"a",
"transaction",
"with",
"MEMO_TEXT",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/memo.py#L97-L99 | train | 229,301 |
StellarCN/py-stellar-base | stellar_base/memo.py | IdMemo.to_xdr_object | def to_xdr_object(self):
"""Creates an XDR Memo object for a transaction with MEMO_ID."""
return Xdr.types.Memo(type=Xdr.const.MEMO_ID, id=self.memo_id) | python | def to_xdr_object(self):
"""Creates an XDR Memo object for a transaction with MEMO_ID."""
return Xdr.types.Memo(type=Xdr.const.MEMO_ID, id=self.memo_id) | [
"def",
"to_xdr_object",
"(",
"self",
")",
":",
"return",
"Xdr",
".",
"types",
".",
"Memo",
"(",
"type",
"=",
"Xdr",
".",
"const",
".",
"MEMO_ID",
",",
"id",
"=",
"self",
".",
"memo_id",
")"
] | Creates an XDR Memo object for a transaction with MEMO_ID. | [
"Creates",
"an",
"XDR",
"Memo",
"object",
"for",
"a",
"transaction",
"with",
"MEMO_ID",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/memo.py#L112-L114 | train | 229,302 |
StellarCN/py-stellar-base | stellar_base/memo.py | HashMemo.to_xdr_object | def to_xdr_object(self):
"""Creates an XDR Memo object for a transaction with MEMO_HASH."""
return Xdr.types.Memo(type=Xdr.const.MEMO_HASH, hash=self.memo_hash) | python | def to_xdr_object(self):
"""Creates an XDR Memo object for a transaction with MEMO_HASH."""
return Xdr.types.Memo(type=Xdr.const.MEMO_HASH, hash=self.memo_hash) | [
"def",
"to_xdr_object",
"(",
"self",
")",
":",
"return",
"Xdr",
".",
"types",
".",
"Memo",
"(",
"type",
"=",
"Xdr",
".",
"const",
".",
"MEMO_HASH",
",",
"hash",
"=",
"self",
".",
"memo_hash",
")"
] | Creates an XDR Memo object for a transaction with MEMO_HASH. | [
"Creates",
"an",
"XDR",
"Memo",
"object",
"for",
"a",
"transaction",
"with",
"MEMO_HASH",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/memo.py#L127-L129 | train | 229,303 |
StellarCN/py-stellar-base | stellar_base/memo.py | RetHashMemo.to_xdr_object | def to_xdr_object(self):
"""Creates an XDR Memo object for a transaction with MEMO_RETURN."""
return Xdr.types.Memo(
type=Xdr.const.MEMO_RETURN, retHash=self.memo_return) | python | def to_xdr_object(self):
"""Creates an XDR Memo object for a transaction with MEMO_RETURN."""
return Xdr.types.Memo(
type=Xdr.const.MEMO_RETURN, retHash=self.memo_return) | [
"def",
"to_xdr_object",
"(",
"self",
")",
":",
"return",
"Xdr",
".",
"types",
".",
"Memo",
"(",
"type",
"=",
"Xdr",
".",
"const",
".",
"MEMO_RETURN",
",",
"retHash",
"=",
"self",
".",
"memo_return",
")"
] | Creates an XDR Memo object for a transaction with MEMO_RETURN. | [
"Creates",
"an",
"XDR",
"Memo",
"object",
"for",
"a",
"transaction",
"with",
"MEMO_RETURN",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/memo.py#L148-L151 | train | 229,304 |
StellarCN/py-stellar-base | stellar_base/builder.py | Builder.append_hashx_signer | def append_hashx_signer(self, hashx, signer_weight, source=None):
"""Add a HashX signer to an account.
Add a HashX signer to an account via a :class:`SetOptions
<stellar_base.operation.SetOptions` operation. This is a helper
function for :meth:`append_set_options_op`.
:param hashx: The address of the new hashX signer.
:type hashx: str, bytes
:param int signer_weight: The weight of the new signer.
:param str source: The source account that is adding a signer to its
list of signers.
:return: This builder instance.
"""
return self.append_set_options_op(
signer_address=hashx,
signer_type='hashX',
signer_weight=signer_weight,
source=source) | python | def append_hashx_signer(self, hashx, signer_weight, source=None):
"""Add a HashX signer to an account.
Add a HashX signer to an account via a :class:`SetOptions
<stellar_base.operation.SetOptions` operation. This is a helper
function for :meth:`append_set_options_op`.
:param hashx: The address of the new hashX signer.
:type hashx: str, bytes
:param int signer_weight: The weight of the new signer.
:param str source: The source account that is adding a signer to its
list of signers.
:return: This builder instance.
"""
return self.append_set_options_op(
signer_address=hashx,
signer_type='hashX',
signer_weight=signer_weight,
source=source) | [
"def",
"append_hashx_signer",
"(",
"self",
",",
"hashx",
",",
"signer_weight",
",",
"source",
"=",
"None",
")",
":",
"return",
"self",
".",
"append_set_options_op",
"(",
"signer_address",
"=",
"hashx",
",",
"signer_type",
"=",
"'hashX'",
",",
"signer_weight",
... | Add a HashX signer to an account.
Add a HashX signer to an account via a :class:`SetOptions
<stellar_base.operation.SetOptions` operation. This is a helper
function for :meth:`append_set_options_op`.
:param hashx: The address of the new hashX signer.
:type hashx: str, bytes
:param int signer_weight: The weight of the new signer.
:param str source: The source account that is adding a signer to its
list of signers.
:return: This builder instance. | [
"Add",
"a",
"HashX",
"signer",
"to",
"an",
"account",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/builder.py#L323-L342 | train | 229,305 |
StellarCN/py-stellar-base | stellar_base/builder.py | Builder.append_pre_auth_tx_signer | def append_pre_auth_tx_signer(self,
pre_auth_tx,
signer_weight,
source=None):
"""Add a PreAuthTx signer to an account.
Add a PreAuthTx signer to an account via a :class:`SetOptions
<stellar_base.operation.SetOptions` operation. This is a helper
function for :meth:`append_set_options_op`.
:param pre_auth_tx: The address of the new preAuthTx signer - obtained by calling `hash_meta` on the TransactionEnvelope.
:type pre_auth_tx: str, bytes
:param int signer_weight: The weight of the new signer.
:param str source: The source account that is adding a signer to its
list of signers.
:return: This builder instance.
"""
return self.append_set_options_op(
signer_address=pre_auth_tx,
signer_type='preAuthTx',
signer_weight=signer_weight,
source=source) | python | def append_pre_auth_tx_signer(self,
pre_auth_tx,
signer_weight,
source=None):
"""Add a PreAuthTx signer to an account.
Add a PreAuthTx signer to an account via a :class:`SetOptions
<stellar_base.operation.SetOptions` operation. This is a helper
function for :meth:`append_set_options_op`.
:param pre_auth_tx: The address of the new preAuthTx signer - obtained by calling `hash_meta` on the TransactionEnvelope.
:type pre_auth_tx: str, bytes
:param int signer_weight: The weight of the new signer.
:param str source: The source account that is adding a signer to its
list of signers.
:return: This builder instance.
"""
return self.append_set_options_op(
signer_address=pre_auth_tx,
signer_type='preAuthTx',
signer_weight=signer_weight,
source=source) | [
"def",
"append_pre_auth_tx_signer",
"(",
"self",
",",
"pre_auth_tx",
",",
"signer_weight",
",",
"source",
"=",
"None",
")",
":",
"return",
"self",
".",
"append_set_options_op",
"(",
"signer_address",
"=",
"pre_auth_tx",
",",
"signer_type",
"=",
"'preAuthTx'",
",",... | Add a PreAuthTx signer to an account.
Add a PreAuthTx signer to an account via a :class:`SetOptions
<stellar_base.operation.SetOptions` operation. This is a helper
function for :meth:`append_set_options_op`.
:param pre_auth_tx: The address of the new preAuthTx signer - obtained by calling `hash_meta` on the TransactionEnvelope.
:type pre_auth_tx: str, bytes
:param int signer_weight: The weight of the new signer.
:param str source: The source account that is adding a signer to its
list of signers.
:return: This builder instance. | [
"Add",
"a",
"PreAuthTx",
"signer",
"to",
"an",
"account",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/builder.py#L344-L366 | train | 229,306 |
StellarCN/py-stellar-base | stellar_base/builder.py | Builder.next_builder | def next_builder(self):
"""Create a new builder based off of this one with its sequence number
incremented.
:return: A new Builder instance
:rtype: :class:`Builder`
"""
sequence = self.sequence + 1
next_builder = Builder(
horizon_uri=self.horizon.horizon_uri,
address=self.address,
network=self.network,
sequence=sequence,
fee=self.fee)
next_builder.keypair = self.keypair
return next_builder | python | def next_builder(self):
"""Create a new builder based off of this one with its sequence number
incremented.
:return: A new Builder instance
:rtype: :class:`Builder`
"""
sequence = self.sequence + 1
next_builder = Builder(
horizon_uri=self.horizon.horizon_uri,
address=self.address,
network=self.network,
sequence=sequence,
fee=self.fee)
next_builder.keypair = self.keypair
return next_builder | [
"def",
"next_builder",
"(",
"self",
")",
":",
"sequence",
"=",
"self",
".",
"sequence",
"+",
"1",
"next_builder",
"=",
"Builder",
"(",
"horizon_uri",
"=",
"self",
".",
"horizon",
".",
"horizon_uri",
",",
"address",
"=",
"self",
".",
"address",
",",
"netw... | Create a new builder based off of this one with its sequence number
incremented.
:return: A new Builder instance
:rtype: :class:`Builder` | [
"Create",
"a",
"new",
"builder",
"based",
"off",
"of",
"this",
"one",
"with",
"its",
"sequence",
"number",
"incremented",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/builder.py#L786-L802 | train | 229,307 |
StellarCN/py-stellar-base | stellar_base/builder.py | Builder.get_sequence | def get_sequence(self):
"""Get the sequence number for a given account via Horizon.
:return: The current sequence number for a given account
:rtype: int
"""
if not self.address:
raise StellarAddressInvalidError('No address provided.')
address = self.horizon.account(self.address)
return int(address.get('sequence')) | python | def get_sequence(self):
"""Get the sequence number for a given account via Horizon.
:return: The current sequence number for a given account
:rtype: int
"""
if not self.address:
raise StellarAddressInvalidError('No address provided.')
address = self.horizon.account(self.address)
return int(address.get('sequence')) | [
"def",
"get_sequence",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"address",
":",
"raise",
"StellarAddressInvalidError",
"(",
"'No address provided.'",
")",
"address",
"=",
"self",
".",
"horizon",
".",
"account",
"(",
"self",
".",
"address",
")",
"retu... | Get the sequence number for a given account via Horizon.
:return: The current sequence number for a given account
:rtype: int | [
"Get",
"the",
"sequence",
"number",
"for",
"a",
"given",
"account",
"via",
"Horizon",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/builder.py#L804-L814 | train | 229,308 |
StellarCN/py-stellar-base | stellar_base/asset.py | Asset.to_dict | def to_dict(self):
"""Generate a dict for this object's attributes.
:return: A dict representing an :class:`Asset`
"""
rv = {'code': self.code}
if not self.is_native():
rv['issuer'] = self.issuer
rv['type'] = self.type
else:
rv['type'] = 'native'
return rv | python | def to_dict(self):
"""Generate a dict for this object's attributes.
:return: A dict representing an :class:`Asset`
"""
rv = {'code': self.code}
if not self.is_native():
rv['issuer'] = self.issuer
rv['type'] = self.type
else:
rv['type'] = 'native'
return rv | [
"def",
"to_dict",
"(",
"self",
")",
":",
"rv",
"=",
"{",
"'code'",
":",
"self",
".",
"code",
"}",
"if",
"not",
"self",
".",
"is_native",
"(",
")",
":",
"rv",
"[",
"'issuer'",
"]",
"=",
"self",
".",
"issuer",
"rv",
"[",
"'type'",
"]",
"=",
"self... | Generate a dict for this object's attributes.
:return: A dict representing an :class:`Asset` | [
"Generate",
"a",
"dict",
"for",
"this",
"object",
"s",
"attributes",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/asset.py#L61-L72 | train | 229,309 |
StellarCN/py-stellar-base | stellar_base/stellarxdr/xdrgen.py | id_unique | def id_unique(dict_id, name, lineno):
"""Returns True if dict_id not already used. Otherwise, invokes error"""
if dict_id in name_dict:
global error_occurred
error_occurred = True
print(
"ERROR - {0:s} definition {1:s} at line {2:d} conflicts with {3:s}"
.format(name, dict_id, lineno, name_dict[dict_id]))
return False
else:
return True | python | def id_unique(dict_id, name, lineno):
"""Returns True if dict_id not already used. Otherwise, invokes error"""
if dict_id in name_dict:
global error_occurred
error_occurred = True
print(
"ERROR - {0:s} definition {1:s} at line {2:d} conflicts with {3:s}"
.format(name, dict_id, lineno, name_dict[dict_id]))
return False
else:
return True | [
"def",
"id_unique",
"(",
"dict_id",
",",
"name",
",",
"lineno",
")",
":",
"if",
"dict_id",
"in",
"name_dict",
":",
"global",
"error_occurred",
"error_occurred",
"=",
"True",
"print",
"(",
"\"ERROR - {0:s} definition {1:s} at line {2:d} conflicts with {3:s}\"",
".",
"f... | Returns True if dict_id not already used. Otherwise, invokes error | [
"Returns",
"True",
"if",
"dict_id",
"not",
"already",
"used",
".",
"Otherwise",
"invokes",
"error"
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/stellarxdr/xdrgen.py#L770-L780 | train | 229,310 |
StellarCN/py-stellar-base | stellar_base/base58.py | main | def main():
'''Base58 encode or decode FILE, or standard input, to standard output.'''
import sys
import argparse
stdout = buffer(sys.stdout)
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument(
'file',
metavar='FILE',
nargs='?',
type=argparse.FileType('r'),
default='-')
parser.add_argument(
'-d', '--decode', action='store_true', help='decode data')
parser.add_argument(
'-c',
'--check',
action='store_true',
help='append a checksum before encoding')
args = parser.parse_args()
fun = {
(False, False): b58encode,
(False, True): b58encode_check,
(True, False): b58decode,
(True, True): b58decode_check
}[(args.decode, args.check)]
data = buffer(args.file).read().rstrip(b'\n')
try:
result = fun(data)
except Exception as e:
sys.exit(e)
if not isinstance(result, bytes):
result = result.encode('ascii')
stdout.write(result) | python | def main():
'''Base58 encode or decode FILE, or standard input, to standard output.'''
import sys
import argparse
stdout = buffer(sys.stdout)
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument(
'file',
metavar='FILE',
nargs='?',
type=argparse.FileType('r'),
default='-')
parser.add_argument(
'-d', '--decode', action='store_true', help='decode data')
parser.add_argument(
'-c',
'--check',
action='store_true',
help='append a checksum before encoding')
args = parser.parse_args()
fun = {
(False, False): b58encode,
(False, True): b58encode_check,
(True, False): b58decode,
(True, True): b58decode_check
}[(args.decode, args.check)]
data = buffer(args.file).read().rstrip(b'\n')
try:
result = fun(data)
except Exception as e:
sys.exit(e)
if not isinstance(result, bytes):
result = result.encode('ascii')
stdout.write(result) | [
"def",
"main",
"(",
")",
":",
"import",
"sys",
"import",
"argparse",
"stdout",
"=",
"buffer",
"(",
"sys",
".",
"stdout",
")",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"main",
".",
"__doc__",
")",
"parser",
".",
"add_argu... | Base58 encode or decode FILE, or standard input, to standard output. | [
"Base58",
"encode",
"or",
"decode",
"FILE",
"or",
"standard",
"input",
"to",
"standard",
"output",
"."
] | cce2e782064fb3955c85e1696e630d67b1010848 | https://github.com/StellarCN/py-stellar-base/blob/cce2e782064fb3955c85e1696e630d67b1010848/stellar_base/base58.py#L93-L134 | train | 229,311 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/sles_11/utils.py | Utils._Dhcpcd | def _Dhcpcd(self, interfaces, logger):
"""Use dhcpcd to activate the interfaces.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port.
"""
for interface in interfaces:
dhcpcd = ['/sbin/dhcpcd']
try:
subprocess.check_call(dhcpcd + ['-x', interface])
except subprocess.CalledProcessError:
# Dhcpcd not yet running for this device.
logger.info('Dhcpcd not yet running for interface %s.', interface)
try:
subprocess.check_call(dhcpcd + [interface])
except subprocess.CalledProcessError:
# The interface is already active.
logger.warning('Could not activate interface %s.', interface) | python | def _Dhcpcd(self, interfaces, logger):
"""Use dhcpcd to activate the interfaces.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port.
"""
for interface in interfaces:
dhcpcd = ['/sbin/dhcpcd']
try:
subprocess.check_call(dhcpcd + ['-x', interface])
except subprocess.CalledProcessError:
# Dhcpcd not yet running for this device.
logger.info('Dhcpcd not yet running for interface %s.', interface)
try:
subprocess.check_call(dhcpcd + [interface])
except subprocess.CalledProcessError:
# The interface is already active.
logger.warning('Could not activate interface %s.', interface) | [
"def",
"_Dhcpcd",
"(",
"self",
",",
"interfaces",
",",
"logger",
")",
":",
"for",
"interface",
"in",
"interfaces",
":",
"dhcpcd",
"=",
"[",
"'/sbin/dhcpcd'",
"]",
"try",
":",
"subprocess",
".",
"check_call",
"(",
"dhcpcd",
"+",
"[",
"'-x'",
",",
"interfa... | Use dhcpcd to activate the interfaces.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port. | [
"Use",
"dhcpcd",
"to",
"activate",
"the",
"interfaces",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/sles_11/utils.py#L44-L62 | train | 229,312 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_manager.py | _CreateTempDir | def _CreateTempDir(prefix, run_dir=None):
"""Context manager for creating a temporary directory.
Args:
prefix: string, the prefix for the temporary directory.
run_dir: string, the base directory location of the temporary directory.
Yields:
string, the temporary directory created.
"""
temp_dir = tempfile.mkdtemp(prefix=prefix + '-', dir=run_dir)
try:
yield temp_dir
finally:
shutil.rmtree(temp_dir) | python | def _CreateTempDir(prefix, run_dir=None):
"""Context manager for creating a temporary directory.
Args:
prefix: string, the prefix for the temporary directory.
run_dir: string, the base directory location of the temporary directory.
Yields:
string, the temporary directory created.
"""
temp_dir = tempfile.mkdtemp(prefix=prefix + '-', dir=run_dir)
try:
yield temp_dir
finally:
shutil.rmtree(temp_dir) | [
"def",
"_CreateTempDir",
"(",
"prefix",
",",
"run_dir",
"=",
"None",
")",
":",
"temp_dir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
"prefix",
"=",
"prefix",
"+",
"'-'",
",",
"dir",
"=",
"run_dir",
")",
"try",
":",
"yield",
"temp_dir",
"finally",
":",
"shu... | Context manager for creating a temporary directory.
Args:
prefix: string, the prefix for the temporary directory.
run_dir: string, the base directory location of the temporary directory.
Yields:
string, the temporary directory created. | [
"Context",
"manager",
"for",
"creating",
"a",
"temporary",
"directory",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_manager.py#L31-L45 | train | 229,313 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_manager.py | ScriptManager._RunScripts | def _RunScripts(self, run_dir=None):
"""Retrieve metadata scripts and execute them.
Args:
run_dir: string, the base directory location of the temporary directory.
"""
with _CreateTempDir(self.script_type, run_dir=run_dir) as dest_dir:
try:
self.logger.info('Starting %s scripts.', self.script_type)
script_dict = self.retriever.GetScripts(dest_dir)
self.executor.RunScripts(script_dict)
finally:
self.logger.info('Finished running %s scripts.', self.script_type) | python | def _RunScripts(self, run_dir=None):
"""Retrieve metadata scripts and execute them.
Args:
run_dir: string, the base directory location of the temporary directory.
"""
with _CreateTempDir(self.script_type, run_dir=run_dir) as dest_dir:
try:
self.logger.info('Starting %s scripts.', self.script_type)
script_dict = self.retriever.GetScripts(dest_dir)
self.executor.RunScripts(script_dict)
finally:
self.logger.info('Finished running %s scripts.', self.script_type) | [
"def",
"_RunScripts",
"(",
"self",
",",
"run_dir",
"=",
"None",
")",
":",
"with",
"_CreateTempDir",
"(",
"self",
".",
"script_type",
",",
"run_dir",
"=",
"run_dir",
")",
"as",
"dest_dir",
":",
"try",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'Star... | Retrieve metadata scripts and execute them.
Args:
run_dir: string, the base directory location of the temporary directory. | [
"Retrieve",
"metadata",
"scripts",
"and",
"execute",
"them",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_manager.py#L71-L83 | train | 229,314 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/instance_setup/instance_setup.py | InstanceSetup._GetInstanceConfig | def _GetInstanceConfig(self):
"""Get the instance configuration specified in metadata.
Returns:
string, the instance configuration data.
"""
try:
instance_data = self.metadata_dict['instance']['attributes']
except KeyError:
instance_data = {}
self.logger.warning('Instance attributes were not found.')
try:
project_data = self.metadata_dict['project']['attributes']
except KeyError:
project_data = {}
self.logger.warning('Project attributes were not found.')
return (instance_data.get('google-instance-configs')
or project_data.get('google-instance-configs')) | python | def _GetInstanceConfig(self):
"""Get the instance configuration specified in metadata.
Returns:
string, the instance configuration data.
"""
try:
instance_data = self.metadata_dict['instance']['attributes']
except KeyError:
instance_data = {}
self.logger.warning('Instance attributes were not found.')
try:
project_data = self.metadata_dict['project']['attributes']
except KeyError:
project_data = {}
self.logger.warning('Project attributes were not found.')
return (instance_data.get('google-instance-configs')
or project_data.get('google-instance-configs')) | [
"def",
"_GetInstanceConfig",
"(",
"self",
")",
":",
"try",
":",
"instance_data",
"=",
"self",
".",
"metadata_dict",
"[",
"'instance'",
"]",
"[",
"'attributes'",
"]",
"except",
"KeyError",
":",
"instance_data",
"=",
"{",
"}",
"self",
".",
"logger",
".",
"wa... | Get the instance configuration specified in metadata.
Returns:
string, the instance configuration data. | [
"Get",
"the",
"instance",
"configuration",
"specified",
"in",
"metadata",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/instance_setup/instance_setup.py#L72-L91 | train | 229,315 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/instance_setup/instance_setup.py | InstanceSetup._GenerateSshKey | def _GenerateSshKey(self, key_type, key_dest):
"""Generate a new SSH key.
Args:
key_type: string, the type of the SSH key.
key_dest: string, a file location to store the SSH key.
"""
# Create a temporary file to save the created RSA keys.
with tempfile.NamedTemporaryFile(prefix=key_type, delete=True) as temp:
temp_key = temp.name
command = ['ssh-keygen', '-t', key_type, '-f', temp_key, '-N', '', '-q']
try:
self.logger.info('Generating SSH key %s.', key_dest)
subprocess.check_call(command)
except subprocess.CalledProcessError:
self.logger.warning('Could not create SSH key %s.', key_dest)
return
shutil.move(temp_key, key_dest)
shutil.move('%s.pub' % temp_key, '%s.pub' % key_dest)
file_utils.SetPermissions(key_dest, mode=0o600)
file_utils.SetPermissions('%s.pub' % key_dest, mode=0o644) | python | def _GenerateSshKey(self, key_type, key_dest):
"""Generate a new SSH key.
Args:
key_type: string, the type of the SSH key.
key_dest: string, a file location to store the SSH key.
"""
# Create a temporary file to save the created RSA keys.
with tempfile.NamedTemporaryFile(prefix=key_type, delete=True) as temp:
temp_key = temp.name
command = ['ssh-keygen', '-t', key_type, '-f', temp_key, '-N', '', '-q']
try:
self.logger.info('Generating SSH key %s.', key_dest)
subprocess.check_call(command)
except subprocess.CalledProcessError:
self.logger.warning('Could not create SSH key %s.', key_dest)
return
shutil.move(temp_key, key_dest)
shutil.move('%s.pub' % temp_key, '%s.pub' % key_dest)
file_utils.SetPermissions(key_dest, mode=0o600)
file_utils.SetPermissions('%s.pub' % key_dest, mode=0o644) | [
"def",
"_GenerateSshKey",
"(",
"self",
",",
"key_type",
",",
"key_dest",
")",
":",
"# Create a temporary file to save the created RSA keys.",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"prefix",
"=",
"key_type",
",",
"delete",
"=",
"True",
")",
"as",
"temp"... | Generate a new SSH key.
Args:
key_type: string, the type of the SSH key.
key_dest: string, a file location to store the SSH key. | [
"Generate",
"a",
"new",
"SSH",
"key",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/instance_setup/instance_setup.py#L119-L142 | train | 229,316 |
def _StartSshd(self):
    """Initialize the SSH daemon."""
    # Exit as early as possible: on systemd images, instance setup unit
    # files already block sshd from starting.
    if os.path.exists(constants.LOCALBASE + '/bin/systemctl'):
        return
    # Start (and reload) whichever sysvinit/upstart service layout exists.
    # Only the first matching service name is acted on.
    for service, init_script, upstart_conf in (
            ('ssh', '/etc/init.d/ssh', '/etc/init/ssh.conf'),
            ('sshd', '/etc/init.d/sshd', '/etc/init/sshd.conf')):
        if os.path.exists(init_script) or os.path.exists(upstart_conf):
            subprocess.call(['service', service, 'start'])
            subprocess.call(['service', service, 'reload'])
            return
"""Initialize the SSH daemon."""
# Exit as early as possible.
# Instance setup systemd scripts block sshd from starting.
if os.path.exists(constants.LOCALBASE + '/bin/systemctl'):
return
elif (os.path.exists('/etc/init.d/ssh')
or os.path.exists('/etc/init/ssh.conf')):
subprocess.call(['service', 'ssh', 'start'])
subprocess.call(['service', 'ssh', 'reload'])
elif (os.path.exists('/etc/init.d/sshd')
or os.path.exists('/etc/init/sshd.conf')):
subprocess.call(['service', 'sshd', 'start'])
subprocess.call(['service', 'sshd', 'reload']) | [
"def",
"_StartSshd",
"(",
"self",
")",
":",
"# Exit as early as possible.",
"# Instance setup systemd scripts block sshd from starting.",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"constants",
".",
"LOCALBASE",
"+",
"'/bin/systemctl'",
")",
":",
"return",
"elif",
"... | Initialize the SSH daemon. | [
"Initialize",
"the",
"SSH",
"daemon",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/instance_setup/instance_setup.py#L144-L157 | train | 229,317 |
def _SetSshHostKeys(self, host_key_types=None):
    """Regenerate SSH host keys when the VM boots with a new identity.

    Booting a VM from an image with a known SSH host key allows a number
    of attacks, so host keys are regenerated whenever the stored instance
    ID differs from the current one: on first boot, and whenever the disk
    is used to boot a new instance.

    Args:
        host_key_types: string, a comma separated list of host key types.
    """
    section = 'Instance'
    instance_id = self._GetInstanceId()
    recorded_id = self.instance_config.GetOptionString(section, 'instance_id')
    if instance_id != recorded_id:
        self.logger.info('Generating SSH host keys for instance %s.', instance_id)
        file_regex = re.compile(r'ssh_host_(?P<type>[a-z0-9]*)_key\Z')
        key_dir = '/etc/ssh'
        # Union of the key files already present and the ones requested via
        # configuration, so both get (re)generated.
        existing = {f for f in os.listdir(key_dir) if file_regex.match(f)}
        requested = {
            'ssh_host_%s_key' % key_type
            for key_type in (host_key_types.split(',') if host_key_types else [])}
        for key_file in existing | requested:
            key_type = file_regex.match(key_file).group('type')
            self._GenerateSshKey(key_type, os.path.join(key_dir, key_file))
        self._StartSshd()
        self.instance_config.SetOption(section, 'instance_id', str(instance_id))
"""Regenerates SSH host keys when the VM is restarted with a new IP address.
Booting a VM from an image with a known SSH key allows a number of attacks.
This function will regenerating the host key whenever the IP address
changes. This applies the first time the instance is booted, and each time
the disk is used to boot a new instance.
Args:
host_key_types: string, a comma separated list of host key types.
"""
section = 'Instance'
instance_id = self._GetInstanceId()
if instance_id != self.instance_config.GetOptionString(
section, 'instance_id'):
self.logger.info('Generating SSH host keys for instance %s.', instance_id)
file_regex = re.compile(r'ssh_host_(?P<type>[a-z0-9]*)_key\Z')
key_dir = '/etc/ssh'
key_files = [f for f in os.listdir(key_dir) if file_regex.match(f)]
key_types = host_key_types.split(',') if host_key_types else []
key_types_files = ['ssh_host_%s_key' % key_type for key_type in key_types]
for key_file in set(key_files) | set(key_types_files):
key_type = file_regex.match(key_file).group('type')
key_dest = os.path.join(key_dir, key_file)
self._GenerateSshKey(key_type, key_dest)
self._StartSshd()
self.instance_config.SetOption(section, 'instance_id', str(instance_id)) | [
"def",
"_SetSshHostKeys",
"(",
"self",
",",
"host_key_types",
"=",
"None",
")",
":",
"section",
"=",
"'Instance'",
"instance_id",
"=",
"self",
".",
"_GetInstanceId",
"(",
")",
"if",
"instance_id",
"!=",
"self",
".",
"instance_config",
".",
"GetOptionString",
"... | Regenerates SSH host keys when the VM is restarted with a new IP address.
Booting a VM from an image with a known SSH key allows a number of attacks.
This function will regenerating the host key whenever the IP address
changes. This applies the first time the instance is booted, and each time
the disk is used to boot a new instance.
Args:
host_key_types: string, a comma separated list of host key types. | [
"Regenerates",
"SSH",
"host",
"keys",
"when",
"the",
"VM",
"is",
"restarted",
"with",
"a",
"new",
"IP",
"address",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/instance_setup/instance_setup.py#L159-L185 | train | 229,318 |
def _SetupBotoConfig(self):
    """Set the boto config so GSUtil works with provisioned service accounts."""
    numeric_id = self._GetNumericProjectId()
    try:
        boto_config.BotoConfig(numeric_id, debug=self.debug)
    except (IOError, OSError) as error:
        # Best-effort: a missing/unwritable boto config is logged, not fatal.
        self.logger.warning(str(error))
"""Set the boto config so GSUtil works with provisioned service accounts."""
project_id = self._GetNumericProjectId()
try:
boto_config.BotoConfig(project_id, debug=self.debug)
except (IOError, OSError) as e:
self.logger.warning(str(e)) | [
"def",
"_SetupBotoConfig",
"(",
"self",
")",
":",
"project_id",
"=",
"self",
".",
"_GetNumericProjectId",
"(",
")",
"try",
":",
"boto_config",
".",
"BotoConfig",
"(",
"project_id",
",",
"debug",
"=",
"self",
".",
"debug",
")",
"except",
"(",
"IOError",
","... | Set the boto config so GSUtil works with provisioned service accounts. | [
"Set",
"the",
"boto",
"config",
"so",
"GSUtil",
"works",
"with",
"provisioned",
"service",
"accounts",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/instance_setup/instance_setup.py#L199-L205 | train | 229,319 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_retriever.py | ScriptRetriever._DownloadAuthUrl | def _DownloadAuthUrl(self, url, dest_dir):
"""Download a Google Storage URL using an authentication token.
If the token cannot be fetched, fallback to unauthenticated download.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script.
"""
dest_file = tempfile.NamedTemporaryFile(dir=dest_dir, delete=False)
dest_file.close()
dest = dest_file.name
self.logger.info(
'Downloading url from %s to %s using authentication token.', url, dest)
if not self.token:
response = self.watcher.GetMetadata(
self.token_metadata_key, recursive=False, retry=False)
if not response:
self.logger.info(
'Authentication token not found. Attempting unauthenticated '
'download.')
return self._DownloadUrl(url, dest_dir)
self.token = '%s %s' % (
response.get('token_type', ''), response.get('access_token', ''))
try:
request = urlrequest.Request(url)
request.add_unredirected_header('Metadata-Flavor', 'Google')
request.add_unredirected_header('Authorization', self.token)
content = urlrequest.urlopen(request).read().decode('utf-8')
except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
self.logger.warning('Could not download %s. %s.', url, str(e))
return None
with open(dest, 'wb') as f:
f.write(content)
return dest | python | def _DownloadAuthUrl(self, url, dest_dir):
"""Download a Google Storage URL using an authentication token.
If the token cannot be fetched, fallback to unauthenticated download.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script.
"""
dest_file = tempfile.NamedTemporaryFile(dir=dest_dir, delete=False)
dest_file.close()
dest = dest_file.name
self.logger.info(
'Downloading url from %s to %s using authentication token.', url, dest)
if not self.token:
response = self.watcher.GetMetadata(
self.token_metadata_key, recursive=False, retry=False)
if not response:
self.logger.info(
'Authentication token not found. Attempting unauthenticated '
'download.')
return self._DownloadUrl(url, dest_dir)
self.token = '%s %s' % (
response.get('token_type', ''), response.get('access_token', ''))
try:
request = urlrequest.Request(url)
request.add_unredirected_header('Metadata-Flavor', 'Google')
request.add_unredirected_header('Authorization', self.token)
content = urlrequest.urlopen(request).read().decode('utf-8')
except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
self.logger.warning('Could not download %s. %s.', url, str(e))
return None
with open(dest, 'wb') as f:
f.write(content)
return dest | [
"def",
"_DownloadAuthUrl",
"(",
"self",
",",
"url",
",",
"dest_dir",
")",
":",
"dest_file",
"=",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"dir",
"=",
"dest_dir",
",",
"delete",
"=",
"False",
")",
"dest_file",
".",
"close",
"(",
")",
"dest",
"=",
"dest... | Download a Google Storage URL using an authentication token.
If the token cannot be fetched, fallback to unauthenticated download.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script. | [
"Download",
"a",
"Google",
"Storage",
"URL",
"using",
"an",
"authentication",
"token",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_retriever.py#L48-L92 | train | 229,320 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_retriever.py | ScriptRetriever._DownloadUrl | def _DownloadUrl(self, url, dest_dir):
"""Download a script from a given URL.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script.
"""
dest_file = tempfile.NamedTemporaryFile(dir=dest_dir, delete=False)
dest_file.close()
dest = dest_file.name
self.logger.info('Downloading url from %s to %s.', url, dest)
try:
urlretrieve.urlretrieve(url, dest)
return dest
except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
self.logger.warning('Could not download %s. %s.', url, str(e))
except Exception as e:
self.logger.warning('Exception downloading %s. %s.', url, str(e))
return None | python | def _DownloadUrl(self, url, dest_dir):
"""Download a script from a given URL.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script.
"""
dest_file = tempfile.NamedTemporaryFile(dir=dest_dir, delete=False)
dest_file.close()
dest = dest_file.name
self.logger.info('Downloading url from %s to %s.', url, dest)
try:
urlretrieve.urlretrieve(url, dest)
return dest
except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
self.logger.warning('Could not download %s. %s.', url, str(e))
except Exception as e:
self.logger.warning('Exception downloading %s. %s.', url, str(e))
return None | [
"def",
"_DownloadUrl",
"(",
"self",
",",
"url",
",",
"dest_dir",
")",
":",
"dest_file",
"=",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"dir",
"=",
"dest_dir",
",",
"delete",
"=",
"False",
")",
"dest_file",
".",
"close",
"(",
")",
"dest",
"=",
"dest_fil... | Download a script from a given URL.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script. | [
"Download",
"a",
"script",
"from",
"a",
"given",
"URL",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_retriever.py#L94-L116 | train | 229,321 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_retriever.py | ScriptRetriever._DownloadScript | def _DownloadScript(self, url, dest_dir):
"""Download the contents of the URL to the destination.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script.
"""
# Check for the preferred Google Storage URL format:
# gs://<bucket>/<object>
if url.startswith(r'gs://'):
# Convert the string into a standard URL.
url = re.sub('^gs://', 'https://storage.googleapis.com/', url)
return self._DownloadAuthUrl(url, dest_dir)
header = r'http[s]?://'
domain = r'storage\.googleapis\.com'
# Many of the Google Storage URLs are supported below.
# It is prefered that customers specify their object using
# its gs://<bucket>/<object> url.
bucket = r'(?P<bucket>[a-z0-9][-_.a-z0-9]*[a-z0-9])'
# Accept any non-empty string that doesn't contain a wildcard character
obj = r'(?P<obj>[^\*\?]+)'
# Check for the Google Storage URLs:
# http://<bucket>.storage.googleapis.com/<object>
# https://<bucket>.storage.googleapis.com/<object>
gs_regex = re.compile(r'\A%s%s\.%s/%s\Z' % (header, bucket, domain, obj))
match = gs_regex.match(url)
if match:
return self._DownloadAuthUrl(url, dest_dir)
# Check for the other possible Google Storage URLs:
# http://storage.googleapis.com/<bucket>/<object>
# https://storage.googleapis.com/<bucket>/<object>
#
# The following are deprecated but checked:
# http://commondatastorage.googleapis.com/<bucket>/<object>
# https://commondatastorage.googleapis.com/<bucket>/<object>
gs_regex = re.compile(
r'\A%s(commondata)?%s/%s/%s\Z' % (header, domain, bucket, obj))
match = gs_regex.match(url)
if match:
return self._DownloadAuthUrl(url, dest_dir)
# Unauthenticated download of the object.
return self._DownloadUrl(url, dest_dir) | python | def _DownloadScript(self, url, dest_dir):
"""Download the contents of the URL to the destination.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script.
"""
# Check for the preferred Google Storage URL format:
# gs://<bucket>/<object>
if url.startswith(r'gs://'):
# Convert the string into a standard URL.
url = re.sub('^gs://', 'https://storage.googleapis.com/', url)
return self._DownloadAuthUrl(url, dest_dir)
header = r'http[s]?://'
domain = r'storage\.googleapis\.com'
# Many of the Google Storage URLs are supported below.
# It is prefered that customers specify their object using
# its gs://<bucket>/<object> url.
bucket = r'(?P<bucket>[a-z0-9][-_.a-z0-9]*[a-z0-9])'
# Accept any non-empty string that doesn't contain a wildcard character
obj = r'(?P<obj>[^\*\?]+)'
# Check for the Google Storage URLs:
# http://<bucket>.storage.googleapis.com/<object>
# https://<bucket>.storage.googleapis.com/<object>
gs_regex = re.compile(r'\A%s%s\.%s/%s\Z' % (header, bucket, domain, obj))
match = gs_regex.match(url)
if match:
return self._DownloadAuthUrl(url, dest_dir)
# Check for the other possible Google Storage URLs:
# http://storage.googleapis.com/<bucket>/<object>
# https://storage.googleapis.com/<bucket>/<object>
#
# The following are deprecated but checked:
# http://commondatastorage.googleapis.com/<bucket>/<object>
# https://commondatastorage.googleapis.com/<bucket>/<object>
gs_regex = re.compile(
r'\A%s(commondata)?%s/%s/%s\Z' % (header, domain, bucket, obj))
match = gs_regex.match(url)
if match:
return self._DownloadAuthUrl(url, dest_dir)
# Unauthenticated download of the object.
return self._DownloadUrl(url, dest_dir) | [
"def",
"_DownloadScript",
"(",
"self",
",",
"url",
",",
"dest_dir",
")",
":",
"# Check for the preferred Google Storage URL format:",
"# gs://<bucket>/<object>",
"if",
"url",
".",
"startswith",
"(",
"r'gs://'",
")",
":",
"# Convert the string into a standard URL.",
"url",
... | Download the contents of the URL to the destination.
Args:
url: string, the URL to download.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
string, the path to the file storing the metadata script. | [
"Download",
"the",
"contents",
"of",
"the",
"URL",
"to",
"the",
"destination",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_retriever.py#L118-L168 | train | 229,322 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_retriever.py | ScriptRetriever._GetAttributeScripts | def _GetAttributeScripts(self, attribute_data, dest_dir):
"""Retrieve the scripts from attribute metadata.
Args:
attribute_data: dict, the contents of the attributes metadata.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
dict, a dictionary mapping metadata keys to files storing scripts.
"""
script_dict = {}
attribute_data = attribute_data or {}
metadata_key = '%s-script' % self.script_type
metadata_value = attribute_data.get(metadata_key)
if metadata_value:
self.logger.info('Found %s in metadata.', metadata_key)
with tempfile.NamedTemporaryFile(
mode='w', dir=dest_dir, delete=False) as dest:
dest.write(metadata_value.lstrip())
script_dict[metadata_key] = dest.name
metadata_key = '%s-script-url' % self.script_type
metadata_value = attribute_data.get(metadata_key)
if metadata_value:
self.logger.info('Found %s in metadata.', metadata_key)
script_dict[metadata_key] = self._DownloadScript(
metadata_value, dest_dir)
return script_dict | python | def _GetAttributeScripts(self, attribute_data, dest_dir):
"""Retrieve the scripts from attribute metadata.
Args:
attribute_data: dict, the contents of the attributes metadata.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
dict, a dictionary mapping metadata keys to files storing scripts.
"""
script_dict = {}
attribute_data = attribute_data or {}
metadata_key = '%s-script' % self.script_type
metadata_value = attribute_data.get(metadata_key)
if metadata_value:
self.logger.info('Found %s in metadata.', metadata_key)
with tempfile.NamedTemporaryFile(
mode='w', dir=dest_dir, delete=False) as dest:
dest.write(metadata_value.lstrip())
script_dict[metadata_key] = dest.name
metadata_key = '%s-script-url' % self.script_type
metadata_value = attribute_data.get(metadata_key)
if metadata_value:
self.logger.info('Found %s in metadata.', metadata_key)
script_dict[metadata_key] = self._DownloadScript(
metadata_value, dest_dir)
return script_dict | [
"def",
"_GetAttributeScripts",
"(",
"self",
",",
"attribute_data",
",",
"dest_dir",
")",
":",
"script_dict",
"=",
"{",
"}",
"attribute_data",
"=",
"attribute_data",
"or",
"{",
"}",
"metadata_key",
"=",
"'%s-script'",
"%",
"self",
".",
"script_type",
"metadata_va... | Retrieve the scripts from attribute metadata.
Args:
attribute_data: dict, the contents of the attributes metadata.
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
dict, a dictionary mapping metadata keys to files storing scripts. | [
"Retrieve",
"the",
"scripts",
"from",
"attribute",
"metadata",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_retriever.py#L170-L198 | train | 229,323 |
def GetScripts(self, dest_dir):
    """Retrieve the scripts to execute.

    Args:
        dest_dir: string, the path to a directory for storing metadata scripts.

    Returns:
        dict, a dictionary mapping set metadata keys with associated scripts.
    """
    metadata_dict = self.watcher.GetMetadata() or {}

    try:
        instance_data = metadata_dict['instance']['attributes']
    except KeyError:
        instance_data = None
        self.logger.warning('Instance attributes were not found.')

    try:
        project_data = metadata_dict['project']['attributes']
    except KeyError:
        project_data = None
        self.logger.warning('Project attributes were not found.')

    # Instance-level scripts take precedence; fall back to project level.
    instance_scripts = self._GetAttributeScripts(instance_data, dest_dir)
    if instance_scripts:
        return instance_scripts
    return self._GetAttributeScripts(project_data, dest_dir)
or self._GetAttributeScripts(project_data, dest_dir)) | python | def GetScripts(self, dest_dir):
"""Retrieve the scripts to execute.
Args:
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
dict, a dictionary mapping set metadata keys with associated scripts.
"""
metadata_dict = self.watcher.GetMetadata() or {}
try:
instance_data = metadata_dict['instance']['attributes']
except KeyError:
instance_data = None
self.logger.warning('Instance attributes were not found.')
try:
project_data = metadata_dict['project']['attributes']
except KeyError:
project_data = None
self.logger.warning('Project attributes were not found.')
return (self._GetAttributeScripts(instance_data, dest_dir)
or self._GetAttributeScripts(project_data, dest_dir)) | [
"def",
"GetScripts",
"(",
"self",
",",
"dest_dir",
")",
":",
"metadata_dict",
"=",
"self",
".",
"watcher",
".",
"GetMetadata",
"(",
")",
"or",
"{",
"}",
"try",
":",
"instance_data",
"=",
"metadata_dict",
"[",
"'instance'",
"]",
"[",
"'attributes'",
"]",
... | Retrieve the scripts to execute.
Args:
dest_dir: string, the path to a directory for storing metadata scripts.
Returns:
dict, a dictionary mapping set metadata keys with associated scripts. | [
"Retrieve",
"the",
"scripts",
"to",
"execute",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_retriever.py#L200-L224 | train | 229,324 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_executor.py | ScriptExecutor._MakeExecutable | def _MakeExecutable(self, metadata_script):
"""Add executable permissions to a file.
Args:
metadata_script: string, the path to the executable file.
"""
mode = os.stat(metadata_script).st_mode
os.chmod(metadata_script, mode | stat.S_IEXEC) | python | def _MakeExecutable(self, metadata_script):
"""Add executable permissions to a file.
Args:
metadata_script: string, the path to the executable file.
"""
mode = os.stat(metadata_script).st_mode
os.chmod(metadata_script, mode | stat.S_IEXEC) | [
"def",
"_MakeExecutable",
"(",
"self",
",",
"metadata_script",
")",
":",
"mode",
"=",
"os",
".",
"stat",
"(",
"metadata_script",
")",
".",
"st_mode",
"os",
".",
"chmod",
"(",
"metadata_script",
",",
"mode",
"|",
"stat",
".",
"S_IEXEC",
")"
] | Add executable permissions to a file.
Args:
metadata_script: string, the path to the executable file. | [
"Add",
"executable",
"permissions",
"to",
"a",
"file",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_executor.py#L38-L45 | train | 229,325 |
def RunScripts(self, script_dict):
    """Run the metadata scripts; execute a URL script first if one is provided.

    Args:
        script_dict: a dictionary mapping metadata keys to script files.
    """
    # URL-sourced scripts run before inline scripts, in this fixed order.
    candidate_keys = [
        template % self.script_type for template in ('%s-script-url', '%s-script')]
    runnable_keys = [key for key in candidate_keys if script_dict.get(key)]
    if not runnable_keys:
        self.logger.info('No %s scripts found in metadata.', self.script_type)
    for key in runnable_keys:
        script = script_dict.get(key)
        self._MakeExecutable(script)
        self._RunScript(key, script)
"""Run the metadata scripts; execute a URL script first if one is provided.
Args:
script_dict: a dictionary mapping metadata keys to script files.
"""
metadata_types = ['%s-script-url', '%s-script']
metadata_keys = [key % self.script_type for key in metadata_types]
metadata_keys = [key for key in metadata_keys if script_dict.get(key)]
if not metadata_keys:
self.logger.info('No %s scripts found in metadata.', self.script_type)
for metadata_key in metadata_keys:
metadata_script = script_dict.get(metadata_key)
self._MakeExecutable(metadata_script)
self._RunScript(metadata_key, metadata_script) | [
"def",
"RunScripts",
"(",
"self",
",",
"script_dict",
")",
":",
"metadata_types",
"=",
"[",
"'%s-script-url'",
",",
"'%s-script'",
"]",
"metadata_keys",
"=",
"[",
"key",
"%",
"self",
".",
"script_type",
"for",
"key",
"in",
"metadata_types",
"]",
"metadata_keys... | Run the metadata scripts; execute a URL script first if one is provided.
Args:
script_dict: a dictionary mapping metadata keys to script files. | [
"Run",
"the",
"metadata",
"scripts",
";",
"execute",
"a",
"URL",
"script",
"first",
"if",
"one",
"is",
"provided",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_scripts/script_executor.py#L67-L81 | train | 229,326 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/config_manager.py | ConfigManager._AddHeader | def _AddHeader(self, fp):
"""Create a file header in the config.
Args:
fp: int, a file pointer for writing the header.
"""
text = textwrap.wrap(
textwrap.dedent(self.config_header), break_on_hyphens=False)
fp.write('\n'.join(['# ' + line for line in text]))
fp.write('\n\n') | python | def _AddHeader(self, fp):
"""Create a file header in the config.
Args:
fp: int, a file pointer for writing the header.
"""
text = textwrap.wrap(
textwrap.dedent(self.config_header), break_on_hyphens=False)
fp.write('\n'.join(['# ' + line for line in text]))
fp.write('\n\n') | [
"def",
"_AddHeader",
"(",
"self",
",",
"fp",
")",
":",
"text",
"=",
"textwrap",
".",
"wrap",
"(",
"textwrap",
".",
"dedent",
"(",
"self",
".",
"config_header",
")",
",",
"break_on_hyphens",
"=",
"False",
")",
"fp",
".",
"write",
"(",
"'\\n'",
".",
"j... | Create a file header in the config.
Args:
fp: int, a file pointer for writing the header. | [
"Create",
"a",
"file",
"header",
"in",
"the",
"config",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/config_manager.py#L43-L52 | train | 229,327 |
def SetOption(self, section, option, value, overwrite=True):
    """Set the value of an option in the config file.

    Args:
        section: string, the section of the config file to check.
        option: string, the option to set the value of.
        value: string, the value to set the option.
        overwrite: bool, True to overwrite an existing value in the config file.
    """
    # Respect an existing value unless the caller asked to overwrite it.
    if self.config.has_option(section, option) and not overwrite:
        return
    if not self.config.has_section(section):
        self.config.add_section(section)
    self.config.set(section, option, str(value))
"""Set the value of an option in the config file.
Args:
section: string, the section of the config file to check.
option: string, the option to set the value of.
value: string, the value to set the option.
overwrite: bool, True to overwrite an existing value in the config file.
"""
if not overwrite and self.config.has_option(section, option):
return
if not self.config.has_section(section):
self.config.add_section(section)
self.config.set(section, option, str(value)) | [
"def",
"SetOption",
"(",
"self",
",",
"section",
",",
"option",
",",
"value",
",",
"overwrite",
"=",
"True",
")",
":",
"if",
"not",
"overwrite",
"and",
"self",
".",
"config",
".",
"has_option",
"(",
"section",
",",
"option",
")",
":",
"return",
"if",
... | Set the value of an option in the config file.
Args:
section: string, the section of the config file to check.
option: string, the option to set the value of.
value: string, the value to set the option.
overwrite: bool, True to overwrite an existing value in the config file. | [
"Set",
"the",
"value",
"of",
"an",
"option",
"in",
"the",
"config",
"file",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/config_manager.py#L82-L95 | train | 229,328 |
def WriteConfig(self, config_file=None):
    """Write the config values to a given file.

    Args:
        config_file: string, the file location of the config file to write.
    """
    target = config_file or self.config_file
    base_name = os.path.splitext(os.path.basename(target))[0]
    lock_path = '%s/lock/google_%s.lock' % (constants.LOCALSTATEDIR, base_name)
    # Serialize writers on a per-config lock file so concurrent daemons
    # cannot interleave partial writes.
    with file_utils.LockFile(lock_path):
        with open(target, 'w') as config_fp:
            if self.config_header:
                self._AddHeader(config_fp)
            self.config.write(config_fp)
"""Write the config values to a given file.
Args:
config_file: string, the file location of the config file to write.
"""
config_file = config_file or self.config_file
config_name = os.path.splitext(os.path.basename(config_file))[0]
config_lock = (
'%s/lock/google_%s.lock' % (constants.LOCALSTATEDIR, config_name))
with file_utils.LockFile(config_lock):
with open(config_file, 'w') as config_fp:
if self.config_header:
self._AddHeader(config_fp)
self.config.write(config_fp) | [
"def",
"WriteConfig",
"(",
"self",
",",
"config_file",
"=",
"None",
")",
":",
"config_file",
"=",
"config_file",
"or",
"self",
".",
"config_file",
"config_name",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"conf... | Write the config values to a given file.
Args:
config_file: string, the file location of the config file to write. | [
"Write",
"the",
"config",
"values",
"to",
"a",
"given",
"file",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/config_manager.py#L97-L111 | train | 229,329 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/logger.py | Logger | def Logger(name, debug=False, facility=None):
"""Get a logging object with handlers for sending logs to SysLog.
Args:
name: string, the name of the logger which will be added to log entries.
debug: bool, True if debug output should write to the console.
facility: int, an encoding of the SysLog handler's facility and priority.
Returns:
logging object, an object for logging entries.
"""
logger = logging.getLogger(name)
logger.handlers = []
logger.addHandler(logging.NullHandler())
logger.propagate = False
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter(name + ': %(levelname)s %(message)s')
if debug:
# Create a handler for console logging.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
if facility:
# Create a handler for sending logs to SysLog.
syslog_handler = logging.handlers.SysLogHandler(
address=constants.SYSLOG_SOCKET, facility=facility)
syslog_handler.setLevel(logging.INFO)
syslog_handler.setFormatter(formatter)
logger.addHandler(syslog_handler)
return logger | python | def Logger(name, debug=False, facility=None):
"""Get a logging object with handlers for sending logs to SysLog.
Args:
name: string, the name of the logger which will be added to log entries.
debug: bool, True if debug output should write to the console.
facility: int, an encoding of the SysLog handler's facility and priority.
Returns:
logging object, an object for logging entries.
"""
logger = logging.getLogger(name)
logger.handlers = []
logger.addHandler(logging.NullHandler())
logger.propagate = False
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter(name + ': %(levelname)s %(message)s')
if debug:
# Create a handler for console logging.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
if facility:
# Create a handler for sending logs to SysLog.
syslog_handler = logging.handlers.SysLogHandler(
address=constants.SYSLOG_SOCKET, facility=facility)
syslog_handler.setLevel(logging.INFO)
syslog_handler.setFormatter(formatter)
logger.addHandler(syslog_handler)
return logger | [
"def",
"Logger",
"(",
"name",
",",
"debug",
"=",
"False",
",",
"facility",
"=",
"None",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"name",
")",
"logger",
".",
"handlers",
"=",
"[",
"]",
"logger",
".",
"addHandler",
"(",
"logging",
"."... | Get a logging object with handlers for sending logs to SysLog.
Args:
name: string, the name of the logger which will be added to log entries.
debug: bool, True if debug output should write to the console.
facility: int, an encoding of the SysLog handler's facility and priority.
Returns:
logging object, an object for logging entries. | [
"Get",
"a",
"logging",
"object",
"with",
"handlers",
"for",
"sending",
"logs",
"to",
"SysLog",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/logger.py#L22-L55 | train | 229,330 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils._CreateSudoersGroup | def _CreateSudoersGroup(self):
"""Create a Linux group for Google added sudo user accounts."""
if not self._GetGroup(self.google_sudoers_group):
try:
command = self.groupadd_cmd.format(group=self.google_sudoers_group)
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not create the sudoers group. %s.', str(e))
if not os.path.exists(self.google_sudoers_file):
try:
with open(self.google_sudoers_file, 'w') as group:
message = '%{0} ALL=(ALL:ALL) NOPASSWD:ALL'.format(
self.google_sudoers_group)
group.write(message)
except IOError as e:
self.logger.error(
'Could not write sudoers file. %s. %s',
self.google_sudoers_file, str(e))
return
file_utils.SetPermissions(
self.google_sudoers_file, mode=0o440, uid=0, gid=0) | python | def _CreateSudoersGroup(self):
"""Create a Linux group for Google added sudo user accounts."""
if not self._GetGroup(self.google_sudoers_group):
try:
command = self.groupadd_cmd.format(group=self.google_sudoers_group)
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not create the sudoers group. %s.', str(e))
if not os.path.exists(self.google_sudoers_file):
try:
with open(self.google_sudoers_file, 'w') as group:
message = '%{0} ALL=(ALL:ALL) NOPASSWD:ALL'.format(
self.google_sudoers_group)
group.write(message)
except IOError as e:
self.logger.error(
'Could not write sudoers file. %s. %s',
self.google_sudoers_file, str(e))
return
file_utils.SetPermissions(
self.google_sudoers_file, mode=0o440, uid=0, gid=0) | [
"def",
"_CreateSudoersGroup",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_GetGroup",
"(",
"self",
".",
"google_sudoers_group",
")",
":",
"try",
":",
"command",
"=",
"self",
".",
"groupadd_cmd",
".",
"format",
"(",
"group",
"=",
"self",
".",
"googl... | Create a Linux group for Google added sudo user accounts. | [
"Create",
"a",
"Linux",
"group",
"for",
"Google",
"added",
"sudo",
"user",
"accounts",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L92-L114 | train | 229,331 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils._AddUser | def _AddUser(self, user):
"""Configure a Linux user account.
Args:
user: string, the name of the Linux user account to create.
Returns:
bool, True if user creation succeeded.
"""
self.logger.info('Creating a new user account for %s.', user)
command = self.useradd_cmd.format(user=user)
try:
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not create user %s. %s.', user, str(e))
return False
else:
self.logger.info('Created user account %s.', user)
return True | python | def _AddUser(self, user):
"""Configure a Linux user account.
Args:
user: string, the name of the Linux user account to create.
Returns:
bool, True if user creation succeeded.
"""
self.logger.info('Creating a new user account for %s.', user)
command = self.useradd_cmd.format(user=user)
try:
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not create user %s. %s.', user, str(e))
return False
else:
self.logger.info('Created user account %s.', user)
return True | [
"def",
"_AddUser",
"(",
"self",
",",
"user",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'Creating a new user account for %s.'",
",",
"user",
")",
"command",
"=",
"self",
".",
"useradd_cmd",
".",
"format",
"(",
"user",
"=",
"user",
")",
"try",
":... | Configure a Linux user account.
Args:
user: string, the name of the Linux user account to create.
Returns:
bool, True if user creation succeeded. | [
"Configure",
"a",
"Linux",
"user",
"account",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L130-L149 | train | 229,332 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils._UpdateUserGroups | def _UpdateUserGroups(self, user, groups):
"""Update group membership for a Linux user.
Args:
user: string, the name of the Linux user account.
groups: list, the group names to add the user as a member.
Returns:
bool, True if user update succeeded.
"""
groups = ','.join(groups)
self.logger.debug('Updating user %s with groups %s.', user, groups)
command = self.usermod_cmd.format(user=user, groups=groups)
try:
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not update user %s. %s.', user, str(e))
return False
else:
self.logger.debug('Updated user account %s.', user)
return True | python | def _UpdateUserGroups(self, user, groups):
"""Update group membership for a Linux user.
Args:
user: string, the name of the Linux user account.
groups: list, the group names to add the user as a member.
Returns:
bool, True if user update succeeded.
"""
groups = ','.join(groups)
self.logger.debug('Updating user %s with groups %s.', user, groups)
command = self.usermod_cmd.format(user=user, groups=groups)
try:
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not update user %s. %s.', user, str(e))
return False
else:
self.logger.debug('Updated user account %s.', user)
return True | [
"def",
"_UpdateUserGroups",
"(",
"self",
",",
"user",
",",
"groups",
")",
":",
"groups",
"=",
"','",
".",
"join",
"(",
"groups",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'Updating user %s with groups %s.'",
",",
"user",
",",
"groups",
")",
"command"... | Update group membership for a Linux user.
Args:
user: string, the name of the Linux user account.
groups: list, the group names to add the user as a member.
Returns:
bool, True if user update succeeded. | [
"Update",
"group",
"membership",
"for",
"a",
"Linux",
"user",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L151-L171 | train | 229,333 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils._UpdateAuthorizedKeys | def _UpdateAuthorizedKeys(self, user, ssh_keys):
"""Update the authorized keys file for a Linux user with a list of SSH keys.
Args:
user: string, the name of the Linux user account.
ssh_keys: list, the SSH key strings associated with the user.
Raises:
IOError, raised when there is an exception updating a file.
OSError, raised when setting permissions or writing to a read-only
file system.
"""
pw_entry = self._GetUser(user)
if not pw_entry:
return
uid = pw_entry.pw_uid
gid = pw_entry.pw_gid
home_dir = pw_entry.pw_dir
ssh_dir = os.path.join(home_dir, '.ssh')
# Not all sshd's support multiple authorized_keys files so we have to
# share one with the user. We add each of our entries as follows:
# # Added by Google
# authorized_key_entry
authorized_keys_file = os.path.join(ssh_dir, 'authorized_keys')
# Do not write to the authorized keys file if it is a symlink.
if os.path.islink(ssh_dir) or os.path.islink(authorized_keys_file):
self.logger.warning(
'Not updating authorized keys for user %s. File is a symlink.', user)
return
# Create home directory if it does not exist. This can happen if _GetUser
# (getpwnam) returns non-local user info (e.g., from LDAP).
if not os.path.exists(home_dir):
file_utils.SetPermissions(home_dir, mode=0o755, uid=uid, gid=gid,
mkdir=True)
# Create ssh directory if it does not exist.
file_utils.SetPermissions(ssh_dir, mode=0o700, uid=uid, gid=gid, mkdir=True)
# Create entry in the authorized keys file.
prefix = self.logger.name + '-'
with tempfile.NamedTemporaryFile(
mode='w', prefix=prefix, delete=True) as updated_keys:
updated_keys_file = updated_keys.name
if os.path.exists(authorized_keys_file):
lines = open(authorized_keys_file).readlines()
else:
lines = []
google_lines = set()
for i, line in enumerate(lines):
if line.startswith(self.google_comment):
google_lines.update([i, i+1])
# Write user's authorized key entries.
for i, line in enumerate(lines):
if i not in google_lines and line:
line += '\n' if not line.endswith('\n') else ''
updated_keys.write(line)
# Write the Google authorized key entries at the end of the file.
# Each entry is preceded by '# Added by Google'.
for ssh_key in ssh_keys:
ssh_key += '\n' if not ssh_key.endswith('\n') else ''
updated_keys.write('%s\n' % self.google_comment)
updated_keys.write(ssh_key)
# Write buffered data to the updated keys file without closing it and
# update the Linux user's authorized keys file.
updated_keys.flush()
shutil.copy(updated_keys_file, authorized_keys_file)
file_utils.SetPermissions(
authorized_keys_file, mode=0o600, uid=uid, gid=gid) | python | def _UpdateAuthorizedKeys(self, user, ssh_keys):
"""Update the authorized keys file for a Linux user with a list of SSH keys.
Args:
user: string, the name of the Linux user account.
ssh_keys: list, the SSH key strings associated with the user.
Raises:
IOError, raised when there is an exception updating a file.
OSError, raised when setting permissions or writing to a read-only
file system.
"""
pw_entry = self._GetUser(user)
if not pw_entry:
return
uid = pw_entry.pw_uid
gid = pw_entry.pw_gid
home_dir = pw_entry.pw_dir
ssh_dir = os.path.join(home_dir, '.ssh')
# Not all sshd's support multiple authorized_keys files so we have to
# share one with the user. We add each of our entries as follows:
# # Added by Google
# authorized_key_entry
authorized_keys_file = os.path.join(ssh_dir, 'authorized_keys')
# Do not write to the authorized keys file if it is a symlink.
if os.path.islink(ssh_dir) or os.path.islink(authorized_keys_file):
self.logger.warning(
'Not updating authorized keys for user %s. File is a symlink.', user)
return
# Create home directory if it does not exist. This can happen if _GetUser
# (getpwnam) returns non-local user info (e.g., from LDAP).
if not os.path.exists(home_dir):
file_utils.SetPermissions(home_dir, mode=0o755, uid=uid, gid=gid,
mkdir=True)
# Create ssh directory if it does not exist.
file_utils.SetPermissions(ssh_dir, mode=0o700, uid=uid, gid=gid, mkdir=True)
# Create entry in the authorized keys file.
prefix = self.logger.name + '-'
with tempfile.NamedTemporaryFile(
mode='w', prefix=prefix, delete=True) as updated_keys:
updated_keys_file = updated_keys.name
if os.path.exists(authorized_keys_file):
lines = open(authorized_keys_file).readlines()
else:
lines = []
google_lines = set()
for i, line in enumerate(lines):
if line.startswith(self.google_comment):
google_lines.update([i, i+1])
# Write user's authorized key entries.
for i, line in enumerate(lines):
if i not in google_lines and line:
line += '\n' if not line.endswith('\n') else ''
updated_keys.write(line)
# Write the Google authorized key entries at the end of the file.
# Each entry is preceded by '# Added by Google'.
for ssh_key in ssh_keys:
ssh_key += '\n' if not ssh_key.endswith('\n') else ''
updated_keys.write('%s\n' % self.google_comment)
updated_keys.write(ssh_key)
# Write buffered data to the updated keys file without closing it and
# update the Linux user's authorized keys file.
updated_keys.flush()
shutil.copy(updated_keys_file, authorized_keys_file)
file_utils.SetPermissions(
authorized_keys_file, mode=0o600, uid=uid, gid=gid) | [
"def",
"_UpdateAuthorizedKeys",
"(",
"self",
",",
"user",
",",
"ssh_keys",
")",
":",
"pw_entry",
"=",
"self",
".",
"_GetUser",
"(",
"user",
")",
"if",
"not",
"pw_entry",
":",
"return",
"uid",
"=",
"pw_entry",
".",
"pw_uid",
"gid",
"=",
"pw_entry",
".",
... | Update the authorized keys file for a Linux user with a list of SSH keys.
Args:
user: string, the name of the Linux user account.
ssh_keys: list, the SSH key strings associated with the user.
Raises:
IOError, raised when there is an exception updating a file.
OSError, raised when setting permissions or writing to a read-only
file system. | [
"Update",
"the",
"authorized",
"keys",
"file",
"for",
"a",
"Linux",
"user",
"with",
"a",
"list",
"of",
"SSH",
"keys",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L173-L249 | train | 229,334 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils._UpdateSudoer | def _UpdateSudoer(self, user, sudoer=False):
"""Update sudoer group membership for a Linux user account.
Args:
user: string, the name of the Linux user account.
sudoer: bool, True if the user should be a sudoer.
Returns:
bool, True if user update succeeded.
"""
if sudoer:
self.logger.info('Adding user %s to the Google sudoers group.', user)
command = self.gpasswd_add_cmd.format(
user=user, group=self.google_sudoers_group)
else:
self.logger.info('Removing user %s from the Google sudoers group.', user)
command = self.gpasswd_remove_cmd.format(
user=user, group=self.google_sudoers_group)
try:
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not update user %s. %s.', user, str(e))
return False
else:
self.logger.debug('Removed user %s from the Google sudoers group.', user)
return True | python | def _UpdateSudoer(self, user, sudoer=False):
"""Update sudoer group membership for a Linux user account.
Args:
user: string, the name of the Linux user account.
sudoer: bool, True if the user should be a sudoer.
Returns:
bool, True if user update succeeded.
"""
if sudoer:
self.logger.info('Adding user %s to the Google sudoers group.', user)
command = self.gpasswd_add_cmd.format(
user=user, group=self.google_sudoers_group)
else:
self.logger.info('Removing user %s from the Google sudoers group.', user)
command = self.gpasswd_remove_cmd.format(
user=user, group=self.google_sudoers_group)
try:
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not update user %s. %s.', user, str(e))
return False
else:
self.logger.debug('Removed user %s from the Google sudoers group.', user)
return True | [
"def",
"_UpdateSudoer",
"(",
"self",
",",
"user",
",",
"sudoer",
"=",
"False",
")",
":",
"if",
"sudoer",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'Adding user %s to the Google sudoers group.'",
",",
"user",
")",
"command",
"=",
"self",
".",
"gpasswd_ad... | Update sudoer group membership for a Linux user account.
Args:
user: string, the name of the Linux user account.
sudoer: bool, True if the user should be a sudoer.
Returns:
bool, True if user update succeeded. | [
"Update",
"sudoer",
"group",
"membership",
"for",
"a",
"Linux",
"user",
"account",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L251-L277 | train | 229,335 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils._RemoveAuthorizedKeys | def _RemoveAuthorizedKeys(self, user):
"""Remove a Linux user account's authorized keys file to prevent login.
Args:
user: string, the Linux user account to remove access.
"""
pw_entry = self._GetUser(user)
if not pw_entry:
return
home_dir = pw_entry.pw_dir
authorized_keys_file = os.path.join(home_dir, '.ssh', 'authorized_keys')
if os.path.exists(authorized_keys_file):
try:
os.remove(authorized_keys_file)
except OSError as e:
message = 'Could not remove authorized keys for user %s. %s.'
self.logger.warning(message, user, str(e)) | python | def _RemoveAuthorizedKeys(self, user):
"""Remove a Linux user account's authorized keys file to prevent login.
Args:
user: string, the Linux user account to remove access.
"""
pw_entry = self._GetUser(user)
if not pw_entry:
return
home_dir = pw_entry.pw_dir
authorized_keys_file = os.path.join(home_dir, '.ssh', 'authorized_keys')
if os.path.exists(authorized_keys_file):
try:
os.remove(authorized_keys_file)
except OSError as e:
message = 'Could not remove authorized keys for user %s. %s.'
self.logger.warning(message, user, str(e)) | [
"def",
"_RemoveAuthorizedKeys",
"(",
"self",
",",
"user",
")",
":",
"pw_entry",
"=",
"self",
".",
"_GetUser",
"(",
"user",
")",
"if",
"not",
"pw_entry",
":",
"return",
"home_dir",
"=",
"pw_entry",
".",
"pw_dir",
"authorized_keys_file",
"=",
"os",
".",
"pat... | Remove a Linux user account's authorized keys file to prevent login.
Args:
user: string, the Linux user account to remove access. | [
"Remove",
"a",
"Linux",
"user",
"account",
"s",
"authorized",
"keys",
"file",
"to",
"prevent",
"login",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L279-L296 | train | 229,336 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils.GetConfiguredUsers | def GetConfiguredUsers(self):
"""Retrieve the list of configured Google user accounts.
Returns:
list, the username strings of users congfigured by Google.
"""
if os.path.exists(self.google_users_file):
users = open(self.google_users_file).readlines()
else:
users = []
return [user.strip() for user in users] | python | def GetConfiguredUsers(self):
"""Retrieve the list of configured Google user accounts.
Returns:
list, the username strings of users congfigured by Google.
"""
if os.path.exists(self.google_users_file):
users = open(self.google_users_file).readlines()
else:
users = []
return [user.strip() for user in users] | [
"def",
"GetConfiguredUsers",
"(",
"self",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"google_users_file",
")",
":",
"users",
"=",
"open",
"(",
"self",
".",
"google_users_file",
")",
".",
"readlines",
"(",
")",
"else",
":",
"use... | Retrieve the list of configured Google user accounts.
Returns:
list, the username strings of users congfigured by Google. | [
"Retrieve",
"the",
"list",
"of",
"configured",
"Google",
"user",
"accounts",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L298-L308 | train | 229,337 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils.SetConfiguredUsers | def SetConfiguredUsers(self, users):
"""Set the list of configured Google user accounts.
Args:
users: list, the username strings of the Linux accounts.
"""
prefix = self.logger.name + '-'
with tempfile.NamedTemporaryFile(
mode='w', prefix=prefix, delete=True) as updated_users:
updated_users_file = updated_users.name
for user in users:
updated_users.write(user + '\n')
updated_users.flush()
if not os.path.exists(self.google_users_dir):
os.makedirs(self.google_users_dir)
shutil.copy(updated_users_file, self.google_users_file)
file_utils.SetPermissions(self.google_users_file, mode=0o600, uid=0, gid=0) | python | def SetConfiguredUsers(self, users):
"""Set the list of configured Google user accounts.
Args:
users: list, the username strings of the Linux accounts.
"""
prefix = self.logger.name + '-'
with tempfile.NamedTemporaryFile(
mode='w', prefix=prefix, delete=True) as updated_users:
updated_users_file = updated_users.name
for user in users:
updated_users.write(user + '\n')
updated_users.flush()
if not os.path.exists(self.google_users_dir):
os.makedirs(self.google_users_dir)
shutil.copy(updated_users_file, self.google_users_file)
file_utils.SetPermissions(self.google_users_file, mode=0o600, uid=0, gid=0) | [
"def",
"SetConfiguredUsers",
"(",
"self",
",",
"users",
")",
":",
"prefix",
"=",
"self",
".",
"logger",
".",
"name",
"+",
"'-'",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"mode",
"=",
"'w'",
",",
"prefix",
"=",
"prefix",
",",
"delete",
"=",
"... | Set the list of configured Google user accounts.
Args:
users: list, the username strings of the Linux accounts. | [
"Set",
"the",
"list",
"of",
"configured",
"Google",
"user",
"accounts",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L310-L327 | train | 229,338 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils.UpdateUser | def UpdateUser(self, user, ssh_keys):
"""Update a Linux user with authorized SSH keys.
Args:
user: string, the name of the Linux user account.
ssh_keys: list, the SSH key strings associated with the user.
Returns:
bool, True if the user account updated successfully.
"""
if not bool(USER_REGEX.match(user)):
self.logger.warning('Invalid user account name %s.', user)
return False
if not self._GetUser(user):
# User does not exist. Attempt to create the user and add them to the
# appropriate user groups.
if not (self._AddUser(user)
and self._UpdateUserGroups(user, self.groups)):
return False
# Add the user to the google sudoers group.
if not self._UpdateSudoer(user, sudoer=True):
return False
# Don't try to manage account SSH keys with a shell set to disable
# logins. This helps avoid problems caused by operator and root sharing
# a home directory in CentOS and RHEL.
pw_entry = self._GetUser(user)
if pw_entry and os.path.basename(pw_entry.pw_shell) == 'nologin':
message = 'Not updating user %s. User set `nologin` as login shell.'
self.logger.debug(message, user)
return True
try:
self._UpdateAuthorizedKeys(user, ssh_keys)
except (IOError, OSError) as e:
message = 'Could not update the authorized keys file for user %s. %s.'
self.logger.warning(message, user, str(e))
return False
else:
return True | python | def UpdateUser(self, user, ssh_keys):
"""Update a Linux user with authorized SSH keys.
Args:
user: string, the name of the Linux user account.
ssh_keys: list, the SSH key strings associated with the user.
Returns:
bool, True if the user account updated successfully.
"""
if not bool(USER_REGEX.match(user)):
self.logger.warning('Invalid user account name %s.', user)
return False
if not self._GetUser(user):
# User does not exist. Attempt to create the user and add them to the
# appropriate user groups.
if not (self._AddUser(user)
and self._UpdateUserGroups(user, self.groups)):
return False
# Add the user to the google sudoers group.
if not self._UpdateSudoer(user, sudoer=True):
return False
# Don't try to manage account SSH keys with a shell set to disable
# logins. This helps avoid problems caused by operator and root sharing
# a home directory in CentOS and RHEL.
pw_entry = self._GetUser(user)
if pw_entry and os.path.basename(pw_entry.pw_shell) == 'nologin':
message = 'Not updating user %s. User set `nologin` as login shell.'
self.logger.debug(message, user)
return True
try:
self._UpdateAuthorizedKeys(user, ssh_keys)
except (IOError, OSError) as e:
message = 'Could not update the authorized keys file for user %s. %s.'
self.logger.warning(message, user, str(e))
return False
else:
return True | [
"def",
"UpdateUser",
"(",
"self",
",",
"user",
",",
"ssh_keys",
")",
":",
"if",
"not",
"bool",
"(",
"USER_REGEX",
".",
"match",
"(",
"user",
")",
")",
":",
"self",
".",
"logger",
".",
"warning",
"(",
"'Invalid user account name %s.'",
",",
"user",
")",
... | Update a Linux user with authorized SSH keys.
Args:
user: string, the name of the Linux user account.
ssh_keys: list, the SSH key strings associated with the user.
Returns:
bool, True if the user account updated successfully. | [
"Update",
"a",
"Linux",
"user",
"with",
"authorized",
"SSH",
"keys",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L329-L368 | train | 229,339 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py | AccountsUtils.RemoveUser | def RemoveUser(self, user):
"""Remove a Linux user account.
Args:
user: string, the Linux user account to remove.
"""
self.logger.info('Removing user %s.', user)
if self.remove:
command = self.userdel_cmd.format(user=user)
try:
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not remove user %s. %s.', user, str(e))
else:
self.logger.info('Removed user account %s.', user)
self._RemoveAuthorizedKeys(user)
self._UpdateSudoer(user, sudoer=False) | python | def RemoveUser(self, user):
"""Remove a Linux user account.
Args:
user: string, the Linux user account to remove.
"""
self.logger.info('Removing user %s.', user)
if self.remove:
command = self.userdel_cmd.format(user=user)
try:
subprocess.check_call(command.split(' '))
except subprocess.CalledProcessError as e:
self.logger.warning('Could not remove user %s. %s.', user, str(e))
else:
self.logger.info('Removed user account %s.', user)
self._RemoveAuthorizedKeys(user)
self._UpdateSudoer(user, sudoer=False) | [
"def",
"RemoveUser",
"(",
"self",
",",
"user",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'Removing user %s.'",
",",
"user",
")",
"if",
"self",
".",
"remove",
":",
"command",
"=",
"self",
".",
"userdel_cmd",
".",
"format",
"(",
"user",
"=",
... | Remove a Linux user account.
Args:
user: string, the Linux user account to remove. | [
"Remove",
"a",
"Linux",
"user",
"account",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_utils.py#L370-L386 | train | 229,340 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py | OsLoginUtils._RunOsLoginControl | def _RunOsLoginControl(self, params):
"""Run the OS Login control script.
Args:
params: list, the params to pass to the script
Returns:
int, the return code from the call, or None if the script is not found.
"""
try:
return subprocess.call([constants.OSLOGIN_CONTROL_SCRIPT] + params)
except OSError as e:
if e.errno == errno.ENOENT:
return None
else:
raise | python | def _RunOsLoginControl(self, params):
"""Run the OS Login control script.
Args:
params: list, the params to pass to the script
Returns:
int, the return code from the call, or None if the script is not found.
"""
try:
return subprocess.call([constants.OSLOGIN_CONTROL_SCRIPT] + params)
except OSError as e:
if e.errno == errno.ENOENT:
return None
else:
raise | [
"def",
"_RunOsLoginControl",
"(",
"self",
",",
"params",
")",
":",
"try",
":",
"return",
"subprocess",
".",
"call",
"(",
"[",
"constants",
".",
"OSLOGIN_CONTROL_SCRIPT",
"]",
"+",
"params",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"e",
".",
"errno... | Run the OS Login control script.
Args:
params: list, the params to pass to the script
Returns:
int, the return code from the call, or None if the script is not found. | [
"Run",
"the",
"OS",
"Login",
"control",
"script",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py#L41-L56 | train | 229,341 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py | OsLoginUtils._GetStatus | def _GetStatus(self, two_factor=False):
"""Check whether OS Login is installed.
Args:
two_factor: bool, True if two factor should be enabled.
Returns:
bool, True if OS Login is installed.
"""
params = ['status']
if two_factor:
params += ['--twofactor']
retcode = self._RunOsLoginControl(params)
if retcode is None:
if self.oslogin_installed:
self.logger.warning('OS Login not installed.')
self.oslogin_installed = False
return None
# Prevent log spam when OS Login is not installed.
self.oslogin_installed = True
if not os.path.exists(constants.OSLOGIN_NSS_CACHE):
return False
return not retcode | python | def _GetStatus(self, two_factor=False):
"""Check whether OS Login is installed.
Args:
two_factor: bool, True if two factor should be enabled.
Returns:
bool, True if OS Login is installed.
"""
params = ['status']
if two_factor:
params += ['--twofactor']
retcode = self._RunOsLoginControl(params)
if retcode is None:
if self.oslogin_installed:
self.logger.warning('OS Login not installed.')
self.oslogin_installed = False
return None
# Prevent log spam when OS Login is not installed.
self.oslogin_installed = True
if not os.path.exists(constants.OSLOGIN_NSS_CACHE):
return False
return not retcode | [
"def",
"_GetStatus",
"(",
"self",
",",
"two_factor",
"=",
"False",
")",
":",
"params",
"=",
"[",
"'status'",
"]",
"if",
"two_factor",
":",
"params",
"+=",
"[",
"'--twofactor'",
"]",
"retcode",
"=",
"self",
".",
"_RunOsLoginControl",
"(",
"params",
")",
"... | Check whether OS Login is installed.
Args:
two_factor: bool, True if two factor should be enabled.
Returns:
bool, True if OS Login is installed. | [
"Check",
"whether",
"OS",
"Login",
"is",
"installed",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py#L58-L81 | train | 229,342 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py | OsLoginUtils._RunOsLoginNssCache | def _RunOsLoginNssCache(self):
"""Run the OS Login NSS cache binary.
Returns:
int, the return code from the call, or None if the script is not found.
"""
try:
return subprocess.call([constants.OSLOGIN_NSS_CACHE_SCRIPT])
except OSError as e:
if e.errno == errno.ENOENT:
return None
else:
raise | python | def _RunOsLoginNssCache(self):
"""Run the OS Login NSS cache binary.
Returns:
int, the return code from the call, or None if the script is not found.
"""
try:
return subprocess.call([constants.OSLOGIN_NSS_CACHE_SCRIPT])
except OSError as e:
if e.errno == errno.ENOENT:
return None
else:
raise | [
"def",
"_RunOsLoginNssCache",
"(",
"self",
")",
":",
"try",
":",
"return",
"subprocess",
".",
"call",
"(",
"[",
"constants",
".",
"OSLOGIN_NSS_CACHE_SCRIPT",
"]",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"e",
".",
"errno",
"==",
"errno",
".",
"ENO... | Run the OS Login NSS cache binary.
Returns:
int, the return code from the call, or None if the script is not found. | [
"Run",
"the",
"OS",
"Login",
"NSS",
"cache",
"binary",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py#L83-L95 | train | 229,343 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py | OsLoginUtils._RemoveOsLoginNssCache | def _RemoveOsLoginNssCache(self):
"""Remove the OS Login NSS cache file."""
if os.path.exists(constants.OSLOGIN_NSS_CACHE):
try:
os.remove(constants.OSLOGIN_NSS_CACHE)
except OSError as e:
if e.errno != errno.ENOENT:
raise | python | def _RemoveOsLoginNssCache(self):
"""Remove the OS Login NSS cache file."""
if os.path.exists(constants.OSLOGIN_NSS_CACHE):
try:
os.remove(constants.OSLOGIN_NSS_CACHE)
except OSError as e:
if e.errno != errno.ENOENT:
raise | [
"def",
"_RemoveOsLoginNssCache",
"(",
"self",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"constants",
".",
"OSLOGIN_NSS_CACHE",
")",
":",
"try",
":",
"os",
".",
"remove",
"(",
"constants",
".",
"OSLOGIN_NSS_CACHE",
")",
"except",
"OSError",
"as... | Remove the OS Login NSS cache file. | [
"Remove",
"the",
"OS",
"Login",
"NSS",
"cache",
"file",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py#L97-L104 | train | 229,344 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py | OsLoginUtils.UpdateOsLogin | def UpdateOsLogin(self, oslogin_desired, two_factor_desired=False):
"""Update whether OS Login is enabled and update NSS cache if necessary.
Args:
oslogin_desired: bool, enable OS Login if True, disable if False.
two_factor_desired: bool, enable two factor if True, disable if False.
Returns:
int, the return code from updating OS Login, or None if not present.
"""
oslogin_configured = self._GetStatus(two_factor=False)
if oslogin_configured is None:
return None
two_factor_configured = self._GetStatus(two_factor=True)
# Two factor can only be enabled when OS Login is enabled.
two_factor_desired = two_factor_desired and oslogin_desired
if oslogin_desired:
params = ['activate']
if two_factor_desired:
params += ['--twofactor']
# OS Login is desired and not enabled.
if not oslogin_configured:
self.logger.info('Activating OS Login.')
return self._RunOsLoginControl(params) or self._RunOsLoginNssCache()
# Enable two factor authentication.
if two_factor_desired and not two_factor_configured:
self.logger.info('Activating OS Login two factor authentication.')
return self._RunOsLoginControl(params) or self._RunOsLoginNssCache()
# Deactivate two factor authentication.
if two_factor_configured and not two_factor_desired:
self.logger.info('Reactivating OS Login with two factor disabled.')
return (self._RunOsLoginControl(['deactivate'])
or self._RunOsLoginControl(params))
# OS Login features are already enabled. Update the cache if appropriate.
current_time = time.time()
if current_time - self.update_time > NSS_CACHE_DURATION_SEC:
self.update_time = current_time
return self._RunOsLoginNssCache()
elif oslogin_configured:
self.logger.info('Deactivating OS Login.')
return (self._RunOsLoginControl(['deactivate'])
or self._RemoveOsLoginNssCache())
# No action was needed.
return 0 | python | def UpdateOsLogin(self, oslogin_desired, two_factor_desired=False):
"""Update whether OS Login is enabled and update NSS cache if necessary.
Args:
oslogin_desired: bool, enable OS Login if True, disable if False.
two_factor_desired: bool, enable two factor if True, disable if False.
Returns:
int, the return code from updating OS Login, or None if not present.
"""
oslogin_configured = self._GetStatus(two_factor=False)
if oslogin_configured is None:
return None
two_factor_configured = self._GetStatus(two_factor=True)
# Two factor can only be enabled when OS Login is enabled.
two_factor_desired = two_factor_desired and oslogin_desired
if oslogin_desired:
params = ['activate']
if two_factor_desired:
params += ['--twofactor']
# OS Login is desired and not enabled.
if not oslogin_configured:
self.logger.info('Activating OS Login.')
return self._RunOsLoginControl(params) or self._RunOsLoginNssCache()
# Enable two factor authentication.
if two_factor_desired and not two_factor_configured:
self.logger.info('Activating OS Login two factor authentication.')
return self._RunOsLoginControl(params) or self._RunOsLoginNssCache()
# Deactivate two factor authentication.
if two_factor_configured and not two_factor_desired:
self.logger.info('Reactivating OS Login with two factor disabled.')
return (self._RunOsLoginControl(['deactivate'])
or self._RunOsLoginControl(params))
# OS Login features are already enabled. Update the cache if appropriate.
current_time = time.time()
if current_time - self.update_time > NSS_CACHE_DURATION_SEC:
self.update_time = current_time
return self._RunOsLoginNssCache()
elif oslogin_configured:
self.logger.info('Deactivating OS Login.')
return (self._RunOsLoginControl(['deactivate'])
or self._RemoveOsLoginNssCache())
# No action was needed.
return 0 | [
"def",
"UpdateOsLogin",
"(",
"self",
",",
"oslogin_desired",
",",
"two_factor_desired",
"=",
"False",
")",
":",
"oslogin_configured",
"=",
"self",
".",
"_GetStatus",
"(",
"two_factor",
"=",
"False",
")",
"if",
"oslogin_configured",
"is",
"None",
":",
"return",
... | Update whether OS Login is enabled and update NSS cache if necessary.
Args:
oslogin_desired: bool, enable OS Login if True, disable if False.
two_factor_desired: bool, enable two factor if True, disable if False.
Returns:
int, the return code from updating OS Login, or None if not present. | [
"Update",
"whether",
"OS",
"Login",
"is",
"enabled",
"and",
"update",
"NSS",
"cache",
"if",
"necessary",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/oslogin_utils.py#L106-L152 | train | 229,345 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/helpers.py | CallDhclient | def CallDhclient(
interfaces, logger, dhclient_script=None):
"""Configure the network interfaces using dhclient.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port.
dhclient_script: string, the path to a dhclient script used by dhclient.
"""
logger.info('Enabling the Ethernet interfaces %s.', interfaces)
dhclient_command = ['dhclient']
if dhclient_script and os.path.exists(dhclient_script):
dhclient_command += ['-sf', dhclient_script]
try:
subprocess.check_call(dhclient_command + ['-x'] + interfaces)
subprocess.check_call(dhclient_command + interfaces)
except subprocess.CalledProcessError:
logger.warning('Could not enable interfaces %s.', interfaces) | python | def CallDhclient(
interfaces, logger, dhclient_script=None):
"""Configure the network interfaces using dhclient.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port.
dhclient_script: string, the path to a dhclient script used by dhclient.
"""
logger.info('Enabling the Ethernet interfaces %s.', interfaces)
dhclient_command = ['dhclient']
if dhclient_script and os.path.exists(dhclient_script):
dhclient_command += ['-sf', dhclient_script]
try:
subprocess.check_call(dhclient_command + ['-x'] + interfaces)
subprocess.check_call(dhclient_command + interfaces)
except subprocess.CalledProcessError:
logger.warning('Could not enable interfaces %s.', interfaces) | [
"def",
"CallDhclient",
"(",
"interfaces",
",",
"logger",
",",
"dhclient_script",
"=",
"None",
")",
":",
"logger",
".",
"info",
"(",
"'Enabling the Ethernet interfaces %s.'",
",",
"interfaces",
")",
"dhclient_command",
"=",
"[",
"'dhclient'",
"]",
"if",
"dhclient_s... | Configure the network interfaces using dhclient.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port.
dhclient_script: string, the path to a dhclient script used by dhclient. | [
"Configure",
"the",
"network",
"interfaces",
"using",
"dhclient",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/helpers.py#L22-L42 | train | 229,346 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/helpers.py | CallHwclock | def CallHwclock(logger):
"""Sync clock using hwclock.
Args:
logger: logger object, used to write to SysLog and serial port.
"""
command = ['/sbin/hwclock', '--hctosys']
try:
subprocess.check_call(command)
except subprocess.CalledProcessError:
logger.warning('Failed to sync system time with hardware clock.')
else:
logger.info('Synced system time with hardware clock.') | python | def CallHwclock(logger):
"""Sync clock using hwclock.
Args:
logger: logger object, used to write to SysLog and serial port.
"""
command = ['/sbin/hwclock', '--hctosys']
try:
subprocess.check_call(command)
except subprocess.CalledProcessError:
logger.warning('Failed to sync system time with hardware clock.')
else:
logger.info('Synced system time with hardware clock.') | [
"def",
"CallHwclock",
"(",
"logger",
")",
":",
"command",
"=",
"[",
"'/sbin/hwclock'",
",",
"'--hctosys'",
"]",
"try",
":",
"subprocess",
".",
"check_call",
"(",
"command",
")",
"except",
"subprocess",
".",
"CalledProcessError",
":",
"logger",
".",
"warning",
... | Sync clock using hwclock.
Args:
logger: logger object, used to write to SysLog and serial port. | [
"Sync",
"clock",
"using",
"hwclock",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/helpers.py#L45-L57 | train | 229,347 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/helpers.py | CallNtpdate | def CallNtpdate(logger):
"""Sync clock using ntpdate.
Args:
logger: logger object, used to write to SysLog and serial port.
"""
ntpd_inactive = subprocess.call(['service', 'ntpd', 'status'])
try:
if not ntpd_inactive:
subprocess.check_call(['service', 'ntpd', 'stop'])
subprocess.check_call(
'ntpdate `awk \'$1=="server" {print $2}\' /etc/ntp.conf`', shell=True)
if not ntpd_inactive:
subprocess.check_call(['service', 'ntpd', 'start'])
except subprocess.CalledProcessError:
logger.warning('Failed to sync system time with ntp server.')
else:
logger.info('Synced system time with ntp server.') | python | def CallNtpdate(logger):
"""Sync clock using ntpdate.
Args:
logger: logger object, used to write to SysLog and serial port.
"""
ntpd_inactive = subprocess.call(['service', 'ntpd', 'status'])
try:
if not ntpd_inactive:
subprocess.check_call(['service', 'ntpd', 'stop'])
subprocess.check_call(
'ntpdate `awk \'$1=="server" {print $2}\' /etc/ntp.conf`', shell=True)
if not ntpd_inactive:
subprocess.check_call(['service', 'ntpd', 'start'])
except subprocess.CalledProcessError:
logger.warning('Failed to sync system time with ntp server.')
else:
logger.info('Synced system time with ntp server.') | [
"def",
"CallNtpdate",
"(",
"logger",
")",
":",
"ntpd_inactive",
"=",
"subprocess",
".",
"call",
"(",
"[",
"'service'",
",",
"'ntpd'",
",",
"'status'",
"]",
")",
"try",
":",
"if",
"not",
"ntpd_inactive",
":",
"subprocess",
".",
"check_call",
"(",
"[",
"'s... | Sync clock using ntpdate.
Args:
logger: logger object, used to write to SysLog and serial port. | [
"Sync",
"clock",
"using",
"ntpdate",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/helpers.py#L60-L77 | train | 229,348 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/boto/boto_config.py | BotoConfig._GetNumericProjectId | def _GetNumericProjectId(self):
"""Get the numeric project ID for this VM.
Returns:
string, the numeric project ID if one is found.
"""
project_id = 'project/numeric-project-id'
return self.watcher.GetMetadata(metadata_key=project_id, recursive=False) | python | def _GetNumericProjectId(self):
"""Get the numeric project ID for this VM.
Returns:
string, the numeric project ID if one is found.
"""
project_id = 'project/numeric-project-id'
return self.watcher.GetMetadata(metadata_key=project_id, recursive=False) | [
"def",
"_GetNumericProjectId",
"(",
"self",
")",
":",
"project_id",
"=",
"'project/numeric-project-id'",
"return",
"self",
".",
"watcher",
".",
"GetMetadata",
"(",
"metadata_key",
"=",
"project_id",
",",
"recursive",
"=",
"False",
")"
] | Get the numeric project ID for this VM.
Returns:
string, the numeric project ID if one is found. | [
"Get",
"the",
"numeric",
"project",
"ID",
"for",
"this",
"VM",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/boto/boto_config.py#L57-L64 | train | 229,349 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/boto/boto_config.py | BotoConfig._CreateConfig | def _CreateConfig(self, project_id):
"""Create the boto config to support standalone GSUtil.
Args:
project_id: string, the project ID to use in the config file.
"""
project_id = project_id or self._GetNumericProjectId()
# Our project doesn't support service accounts.
if not project_id:
return
self.boto_config_header %= (
self.boto_config_script, self.boto_config_template)
config = config_manager.ConfigManager(
config_file=self.boto_config_template,
config_header=self.boto_config_header)
boto_dir = os.path.dirname(self.boto_config_script)
config.SetOption('GSUtil', 'default_project_id', project_id)
config.SetOption('GSUtil', 'default_api_version', '2')
config.SetOption('GoogleCompute', 'service_account', 'default')
config.SetOption('Plugin', 'plugin_directory', boto_dir)
config.WriteConfig(config_file=self.boto_config) | python | def _CreateConfig(self, project_id):
"""Create the boto config to support standalone GSUtil.
Args:
project_id: string, the project ID to use in the config file.
"""
project_id = project_id or self._GetNumericProjectId()
# Our project doesn't support service accounts.
if not project_id:
return
self.boto_config_header %= (
self.boto_config_script, self.boto_config_template)
config = config_manager.ConfigManager(
config_file=self.boto_config_template,
config_header=self.boto_config_header)
boto_dir = os.path.dirname(self.boto_config_script)
config.SetOption('GSUtil', 'default_project_id', project_id)
config.SetOption('GSUtil', 'default_api_version', '2')
config.SetOption('GoogleCompute', 'service_account', 'default')
config.SetOption('Plugin', 'plugin_directory', boto_dir)
config.WriteConfig(config_file=self.boto_config) | [
"def",
"_CreateConfig",
"(",
"self",
",",
"project_id",
")",
":",
"project_id",
"=",
"project_id",
"or",
"self",
".",
"_GetNumericProjectId",
"(",
")",
"# Our project doesn't support service accounts.",
"if",
"not",
"project_id",
":",
"return",
"self",
".",
"boto_co... | Create the boto config to support standalone GSUtil.
Args:
project_id: string, the project ID to use in the config file. | [
"Create",
"the",
"boto",
"config",
"to",
"support",
"standalone",
"GSUtil",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/boto/boto_config.py#L66-L89 | train | 229,350 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/ip_forwarding_utils.py | IpForwardingUtilsIproute._CreateRouteOptions | def _CreateRouteOptions(self, **kwargs):
"""Create a dictionary of parameters to append to the ip route command.
Args:
**kwargs: dict, the string parameters to update in the ip route command.
Returns:
dict, the string parameters to append to the ip route command.
"""
options = {
'proto': self.proto_id,
'scope': 'host',
}
options.update(kwargs)
return options | python | def _CreateRouteOptions(self, **kwargs):
"""Create a dictionary of parameters to append to the ip route command.
Args:
**kwargs: dict, the string parameters to update in the ip route command.
Returns:
dict, the string parameters to append to the ip route command.
"""
options = {
'proto': self.proto_id,
'scope': 'host',
}
options.update(kwargs)
return options | [
"def",
"_CreateRouteOptions",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"options",
"=",
"{",
"'proto'",
":",
"self",
".",
"proto_id",
",",
"'scope'",
":",
"'host'",
",",
"}",
"options",
".",
"update",
"(",
"kwargs",
")",
"return",
"options"
] | Create a dictionary of parameters to append to the ip route command.
Args:
**kwargs: dict, the string parameters to update in the ip route command.
Returns:
dict, the string parameters to append to the ip route command. | [
"Create",
"a",
"dictionary",
"of",
"parameters",
"to",
"append",
"to",
"the",
"ip",
"route",
"command",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/ip_forwarding_utils.py#L97-L111 | train | 229,351 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/ip_forwarding_utils.py | IpForwardingUtilsIproute._RunIpRoute | def _RunIpRoute(self, args=None, options=None):
"""Run a command with ip route and return the response.
Args:
args: list, the string ip route command args to execute.
options: dict, the string parameters to append to the ip route command.
Returns:
string, the standard output from the ip route command execution.
"""
args = args or []
options = options or {}
command = ['ip', 'route']
command.extend(args)
for item in options.items():
command.extend(item)
try:
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
except OSError as e:
self.logger.warning('Exception running %s. %s.', command, str(e))
else:
if process.returncode:
message = 'Non-zero exit status running %s. %s.'
self.logger.warning(message, command, stderr.strip())
else:
return stdout.decode('utf-8', 'replace')
return '' | python | def _RunIpRoute(self, args=None, options=None):
"""Run a command with ip route and return the response.
Args:
args: list, the string ip route command args to execute.
options: dict, the string parameters to append to the ip route command.
Returns:
string, the standard output from the ip route command execution.
"""
args = args or []
options = options or {}
command = ['ip', 'route']
command.extend(args)
for item in options.items():
command.extend(item)
try:
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
except OSError as e:
self.logger.warning('Exception running %s. %s.', command, str(e))
else:
if process.returncode:
message = 'Non-zero exit status running %s. %s.'
self.logger.warning(message, command, stderr.strip())
else:
return stdout.decode('utf-8', 'replace')
return '' | [
"def",
"_RunIpRoute",
"(",
"self",
",",
"args",
"=",
"None",
",",
"options",
"=",
"None",
")",
":",
"args",
"=",
"args",
"or",
"[",
"]",
"options",
"=",
"options",
"or",
"{",
"}",
"command",
"=",
"[",
"'ip'",
",",
"'route'",
"]",
"command",
".",
... | Run a command with ip route and return the response.
Args:
args: list, the string ip route command args to execute.
options: dict, the string parameters to append to the ip route command.
Returns:
string, the standard output from the ip route command execution. | [
"Run",
"a",
"command",
"with",
"ip",
"route",
"and",
"return",
"the",
"response",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/ip_forwarding_utils.py#L113-L141 | train | 229,352 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/ip_forwarding_utils.py | IpForwardingUtilsIfconfig.RemoveForwardedIp | def RemoveForwardedIp(self, address, interface):
"""Delete an IP address on the network interface.
Args:
address: string, the IP address to configure.
interface: string, the output device to use.
"""
ip = netaddr.IPNetwork(address)
self._RunIfconfig(args=[interface, '-alias', str(ip.ip)]) | python | def RemoveForwardedIp(self, address, interface):
"""Delete an IP address on the network interface.
Args:
address: string, the IP address to configure.
interface: string, the output device to use.
"""
ip = netaddr.IPNetwork(address)
self._RunIfconfig(args=[interface, '-alias', str(ip.ip)]) | [
"def",
"RemoveForwardedIp",
"(",
"self",
",",
"address",
",",
"interface",
")",
":",
"ip",
"=",
"netaddr",
".",
"IPNetwork",
"(",
"address",
")",
"self",
".",
"_RunIfconfig",
"(",
"args",
"=",
"[",
"interface",
",",
"'-alias'",
",",
"str",
"(",
"ip",
"... | Delete an IP address on the network interface.
Args:
address: string, the IP address to configure.
interface: string, the output device to use. | [
"Delete",
"an",
"IP",
"address",
"on",
"the",
"network",
"interface",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/ip_forwarding_utils.py#L294-L302 | train | 229,353 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/boto/compute_auth.py | ComputeAuth._GetGsScopes | def _GetGsScopes(self):
"""Return all Google Storage scopes available on this VM."""
service_accounts = self.watcher.GetMetadata(metadata_key=self.metadata_key)
try:
scopes = service_accounts[self.service_account]['scopes']
return list(GS_SCOPES.intersection(set(scopes))) if scopes else None
except KeyError:
return None | python | def _GetGsScopes(self):
"""Return all Google Storage scopes available on this VM."""
service_accounts = self.watcher.GetMetadata(metadata_key=self.metadata_key)
try:
scopes = service_accounts[self.service_account]['scopes']
return list(GS_SCOPES.intersection(set(scopes))) if scopes else None
except KeyError:
return None | [
"def",
"_GetGsScopes",
"(",
"self",
")",
":",
"service_accounts",
"=",
"self",
".",
"watcher",
".",
"GetMetadata",
"(",
"metadata_key",
"=",
"self",
".",
"metadata_key",
")",
"try",
":",
"scopes",
"=",
"service_accounts",
"[",
"self",
".",
"service_account",
... | Return all Google Storage scopes available on this VM. | [
"Return",
"all",
"Google",
"Storage",
"scopes",
"available",
"on",
"this",
"VM",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/boto/compute_auth.py#L50-L57 | train | 229,354 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/boto/compute_auth.py | ComputeAuth._GetAccessToken | def _GetAccessToken(self):
"""Return an OAuth 2.0 access token for Google Storage."""
service_accounts = self.watcher.GetMetadata(metadata_key=self.metadata_key)
try:
return service_accounts[self.service_account]['token']['access_token']
except KeyError:
return None | python | def _GetAccessToken(self):
"""Return an OAuth 2.0 access token for Google Storage."""
service_accounts = self.watcher.GetMetadata(metadata_key=self.metadata_key)
try:
return service_accounts[self.service_account]['token']['access_token']
except KeyError:
return None | [
"def",
"_GetAccessToken",
"(",
"self",
")",
":",
"service_accounts",
"=",
"self",
".",
"watcher",
".",
"GetMetadata",
"(",
"metadata_key",
"=",
"self",
".",
"metadata_key",
")",
"try",
":",
"return",
"service_accounts",
"[",
"self",
".",
"service_account",
"]"... | Return an OAuth 2.0 access token for Google Storage. | [
"Return",
"an",
"OAuth",
"2",
".",
"0",
"access",
"token",
"for",
"Google",
"Storage",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/boto/compute_auth.py#L59-L65 | train | 229,355 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/clock_skew/clock_skew_daemon.py | ClockSkewDaemon.HandleClockSync | def HandleClockSync(self, response):
"""Called when clock drift token changes.
Args:
response: string, the metadata response with the new drift token value.
"""
self.logger.info('Clock drift token has changed: %s.', response)
self.distro_utils.HandleClockSync(self.logger) | python | def HandleClockSync(self, response):
"""Called when clock drift token changes.
Args:
response: string, the metadata response with the new drift token value.
"""
self.logger.info('Clock drift token has changed: %s.', response)
self.distro_utils.HandleClockSync(self.logger) | [
"def",
"HandleClockSync",
"(",
"self",
",",
"response",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'Clock drift token has changed: %s.'",
",",
"response",
")",
"self",
".",
"distro_utils",
".",
"HandleClockSync",
"(",
"self",
".",
"logger",
")"
] | Called when clock drift token changes.
Args:
response: string, the metadata response with the new drift token value. | [
"Called",
"when",
"clock",
"drift",
"token",
"changes",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/clock_skew/clock_skew_daemon.py#L56-L63 | train | 229,356 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/el_7/utils.py | Utils._DisableNetworkManager | def _DisableNetworkManager(self, interfaces, logger):
"""Disable network manager management on a list of network interfaces.
Args:
interfaces: list of string, the output device names enable.
logger: logger object, used to write to SysLog and serial port.
"""
for interface in interfaces:
interface_config = os.path.join(
self.network_path, 'ifcfg-%s' % interface)
if os.path.exists(interface_config):
self._ModifyInterface(
interface_config, 'DEVICE', interface, replace=False)
self._ModifyInterface(
interface_config, 'NM_CONTROLLED', 'no', replace=True)
else:
with open(interface_config, 'w') as interface_file:
interface_content = [
'# Added by Google.',
'BOOTPROTO=none',
'DEFROUTE=no',
'DEVICE=%s' % interface,
'IPV6INIT=no',
'NM_CONTROLLED=no',
'NOZEROCONF=yes',
'',
]
interface_file.write('\n'.join(interface_content))
logger.info('Created config file for interface %s.', interface) | python | def _DisableNetworkManager(self, interfaces, logger):
"""Disable network manager management on a list of network interfaces.
Args:
interfaces: list of string, the output device names enable.
logger: logger object, used to write to SysLog and serial port.
"""
for interface in interfaces:
interface_config = os.path.join(
self.network_path, 'ifcfg-%s' % interface)
if os.path.exists(interface_config):
self._ModifyInterface(
interface_config, 'DEVICE', interface, replace=False)
self._ModifyInterface(
interface_config, 'NM_CONTROLLED', 'no', replace=True)
else:
with open(interface_config, 'w') as interface_file:
interface_content = [
'# Added by Google.',
'BOOTPROTO=none',
'DEFROUTE=no',
'DEVICE=%s' % interface,
'IPV6INIT=no',
'NM_CONTROLLED=no',
'NOZEROCONF=yes',
'',
]
interface_file.write('\n'.join(interface_content))
logger.info('Created config file for interface %s.', interface) | [
"def",
"_DisableNetworkManager",
"(",
"self",
",",
"interfaces",
",",
"logger",
")",
":",
"for",
"interface",
"in",
"interfaces",
":",
"interface_config",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"network_path",
",",
"'ifcfg-%s'",
"%",
"interfa... | Disable network manager management on a list of network interfaces.
Args:
interfaces: list of string, the output device names enable.
logger: logger object, used to write to SysLog and serial port. | [
"Disable",
"network",
"manager",
"management",
"on",
"a",
"list",
"of",
"network",
"interfaces",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/el_7/utils.py#L47-L75 | train | 229,357 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/el_7/utils.py | Utils._ModifyInterface | def _ModifyInterface(
self, interface_config, config_key, config_value, replace=False):
"""Write a value to a config file if not already present.
Args:
interface_config: string, the path to a config file.
config_key: string, the configuration key to set.
config_value: string, the value to set for the configuration key.
replace: bool, replace the configuration option if already present.
"""
config_entry = '%s=%s' % (config_key, config_value)
if not open(interface_config).read().count(config_key):
with open(interface_config, 'a') as config:
config.write('%s\n' % config_entry)
elif replace:
for line in fileinput.input(interface_config, inplace=True):
print(re.sub(r'%s=.*' % config_key, config_entry, line.rstrip())) | python | def _ModifyInterface(
self, interface_config, config_key, config_value, replace=False):
"""Write a value to a config file if not already present.
Args:
interface_config: string, the path to a config file.
config_key: string, the configuration key to set.
config_value: string, the value to set for the configuration key.
replace: bool, replace the configuration option if already present.
"""
config_entry = '%s=%s' % (config_key, config_value)
if not open(interface_config).read().count(config_key):
with open(interface_config, 'a') as config:
config.write('%s\n' % config_entry)
elif replace:
for line in fileinput.input(interface_config, inplace=True):
print(re.sub(r'%s=.*' % config_key, config_entry, line.rstrip())) | [
"def",
"_ModifyInterface",
"(",
"self",
",",
"interface_config",
",",
"config_key",
",",
"config_value",
",",
"replace",
"=",
"False",
")",
":",
"config_entry",
"=",
"'%s=%s'",
"%",
"(",
"config_key",
",",
"config_value",
")",
"if",
"not",
"open",
"(",
"inte... | Write a value to a config file if not already present.
Args:
interface_config: string, the path to a config file.
config_key: string, the configuration key to set.
config_value: string, the value to set for the configuration key.
replace: bool, replace the configuration option if already present. | [
"Write",
"a",
"value",
"to",
"a",
"config",
"file",
"if",
"not",
"already",
"present",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/el_7/utils.py#L77-L93 | train | 229,358 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py | AccountsDaemon._HasExpired | def _HasExpired(self, key):
"""Check whether an SSH key has expired.
Uses Google-specific semantics of the OpenSSH public key format's comment
field to determine if an SSH key is past its expiration timestamp, and
therefore no longer to be trusted. This format is still subject to change.
Reliance on it in any way is at your own risk.
Args:
key: string, a single public key entry in OpenSSH public key file format.
This will be checked for Google-specific comment semantics, and if
present, those will be analysed.
Returns:
bool, True if the key has Google-specific comment semantics and has an
expiration timestamp in the past, or False otherwise.
"""
self.logger.debug('Processing key: %s.', key)
try:
schema, json_str = key.split(None, 3)[2:]
except (ValueError, AttributeError):
self.logger.debug('No schema identifier. Not expiring key.')
return False
if schema != 'google-ssh':
self.logger.debug('Invalid schema %s. Not expiring key.', schema)
return False
try:
json_obj = json.loads(json_str)
except ValueError:
self.logger.debug('Invalid JSON %s. Not expiring key.', json_str)
return False
if 'expireOn' not in json_obj:
self.logger.debug('No expiration timestamp. Not expiring key.')
return False
expire_str = json_obj['expireOn']
format_str = '%Y-%m-%dT%H:%M:%S+0000'
try:
expire_time = datetime.datetime.strptime(expire_str, format_str)
except ValueError:
self.logger.warning(
'Expiration timestamp "%s" not in format %s. Not expiring key.',
expire_str, format_str)
return False
# Expire the key if and only if we have exceeded the expiration timestamp.
return datetime.datetime.utcnow() > expire_time | python | def _HasExpired(self, key):
"""Check whether an SSH key has expired.
Uses Google-specific semantics of the OpenSSH public key format's comment
field to determine if an SSH key is past its expiration timestamp, and
therefore no longer to be trusted. This format is still subject to change.
Reliance on it in any way is at your own risk.
Args:
key: string, a single public key entry in OpenSSH public key file format.
This will be checked for Google-specific comment semantics, and if
present, those will be analysed.
Returns:
bool, True if the key has Google-specific comment semantics and has an
expiration timestamp in the past, or False otherwise.
"""
self.logger.debug('Processing key: %s.', key)
try:
schema, json_str = key.split(None, 3)[2:]
except (ValueError, AttributeError):
self.logger.debug('No schema identifier. Not expiring key.')
return False
if schema != 'google-ssh':
self.logger.debug('Invalid schema %s. Not expiring key.', schema)
return False
try:
json_obj = json.loads(json_str)
except ValueError:
self.logger.debug('Invalid JSON %s. Not expiring key.', json_str)
return False
if 'expireOn' not in json_obj:
self.logger.debug('No expiration timestamp. Not expiring key.')
return False
expire_str = json_obj['expireOn']
format_str = '%Y-%m-%dT%H:%M:%S+0000'
try:
expire_time = datetime.datetime.strptime(expire_str, format_str)
except ValueError:
self.logger.warning(
'Expiration timestamp "%s" not in format %s. Not expiring key.',
expire_str, format_str)
return False
# Expire the key if and only if we have exceeded the expiration timestamp.
return datetime.datetime.utcnow() > expire_time | [
"def",
"_HasExpired",
"(",
"self",
",",
"key",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Processing key: %s.'",
",",
"key",
")",
"try",
":",
"schema",
",",
"json_str",
"=",
"key",
".",
"split",
"(",
"None",
",",
"3",
")",
"[",
"2",
":",... | Check whether an SSH key has expired.
Uses Google-specific semantics of the OpenSSH public key format's comment
field to determine if an SSH key is past its expiration timestamp, and
therefore no longer to be trusted. This format is still subject to change.
Reliance on it in any way is at your own risk.
Args:
key: string, a single public key entry in OpenSSH public key file format.
This will be checked for Google-specific comment semantics, and if
present, those will be analysed.
Returns:
bool, True if the key has Google-specific comment semantics and has an
expiration timestamp in the past, or False otherwise. | [
"Check",
"whether",
"an",
"SSH",
"key",
"has",
"expired",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py#L78-L128 | train | 229,359 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py | AccountsDaemon._ParseAccountsData | def _ParseAccountsData(self, account_data):
"""Parse the SSH key data into a user map.
Args:
account_data: string, the metadata server SSH key attributes data.
Returns:
dict, a mapping of the form: {'username': ['sshkey1, 'sshkey2', ...]}.
"""
if not account_data:
return {}
lines = [line for line in account_data.splitlines() if line]
user_map = {}
for line in lines:
if not all(ord(c) < 128 for c in line):
self.logger.info('SSH key contains non-ascii character: %s.', line)
continue
split_line = line.split(':', 1)
if len(split_line) != 2:
self.logger.info('SSH key is not a complete entry: %s.', split_line)
continue
user, key = split_line
if self._HasExpired(key):
self.logger.debug('Expired SSH key for user %s: %s.', user, key)
continue
if user not in user_map:
user_map[user] = []
user_map[user].append(key)
logging.debug('User accounts: %s.', user_map)
return user_map | python | def _ParseAccountsData(self, account_data):
"""Parse the SSH key data into a user map.
Args:
account_data: string, the metadata server SSH key attributes data.
Returns:
dict, a mapping of the form: {'username': ['sshkey1, 'sshkey2', ...]}.
"""
if not account_data:
return {}
lines = [line for line in account_data.splitlines() if line]
user_map = {}
for line in lines:
if not all(ord(c) < 128 for c in line):
self.logger.info('SSH key contains non-ascii character: %s.', line)
continue
split_line = line.split(':', 1)
if len(split_line) != 2:
self.logger.info('SSH key is not a complete entry: %s.', split_line)
continue
user, key = split_line
if self._HasExpired(key):
self.logger.debug('Expired SSH key for user %s: %s.', user, key)
continue
if user not in user_map:
user_map[user] = []
user_map[user].append(key)
logging.debug('User accounts: %s.', user_map)
return user_map | [
"def",
"_ParseAccountsData",
"(",
"self",
",",
"account_data",
")",
":",
"if",
"not",
"account_data",
":",
"return",
"{",
"}",
"lines",
"=",
"[",
"line",
"for",
"line",
"in",
"account_data",
".",
"splitlines",
"(",
")",
"if",
"line",
"]",
"user_map",
"="... | Parse the SSH key data into a user map.
Args:
account_data: string, the metadata server SSH key attributes data.
Returns:
dict, a mapping of the form: {'username': ['sshkey1, 'sshkey2', ...]}. | [
"Parse",
"the",
"SSH",
"key",
"data",
"into",
"a",
"user",
"map",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py#L130-L159 | train | 229,360 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py | AccountsDaemon._GetInstanceAndProjectAttributes | def _GetInstanceAndProjectAttributes(self, metadata_dict):
"""Get dictionaries for instance and project attributes.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
tuple, two dictionaries for instance and project attributes.
"""
metadata_dict = metadata_dict or {}
try:
instance_data = metadata_dict['instance']['attributes']
except KeyError:
instance_data = {}
self.logger.warning('Instance attributes were not found.')
try:
project_data = metadata_dict['project']['attributes']
except KeyError:
project_data = {}
self.logger.warning('Project attributes were not found.')
return instance_data, project_data | python | def _GetInstanceAndProjectAttributes(self, metadata_dict):
"""Get dictionaries for instance and project attributes.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
tuple, two dictionaries for instance and project attributes.
"""
metadata_dict = metadata_dict or {}
try:
instance_data = metadata_dict['instance']['attributes']
except KeyError:
instance_data = {}
self.logger.warning('Instance attributes were not found.')
try:
project_data = metadata_dict['project']['attributes']
except KeyError:
project_data = {}
self.logger.warning('Project attributes were not found.')
return instance_data, project_data | [
"def",
"_GetInstanceAndProjectAttributes",
"(",
"self",
",",
"metadata_dict",
")",
":",
"metadata_dict",
"=",
"metadata_dict",
"or",
"{",
"}",
"try",
":",
"instance_data",
"=",
"metadata_dict",
"[",
"'instance'",
"]",
"[",
"'attributes'",
"]",
"except",
"KeyError"... | Get dictionaries for instance and project attributes.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
tuple, two dictionaries for instance and project attributes. | [
"Get",
"dictionaries",
"for",
"instance",
"and",
"project",
"attributes",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py#L161-L184 | train | 229,361 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py | AccountsDaemon._GetAccountsData | def _GetAccountsData(self, metadata_dict):
"""Get the user accounts specified in metadata server contents.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
dict, a mapping of the form: {'username': ['sshkey1, 'sshkey2', ...]}.
"""
instance_data, project_data = self._GetInstanceAndProjectAttributes(
metadata_dict)
valid_keys = [instance_data.get('sshKeys'), instance_data.get('ssh-keys')]
block_project = instance_data.get('block-project-ssh-keys', '').lower()
if block_project != 'true' and not instance_data.get('sshKeys'):
valid_keys.append(project_data.get('ssh-keys'))
valid_keys.append(project_data.get('sshKeys'))
accounts_data = '\n'.join([key for key in valid_keys if key])
return self._ParseAccountsData(accounts_data) | python | def _GetAccountsData(self, metadata_dict):
"""Get the user accounts specified in metadata server contents.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
dict, a mapping of the form: {'username': ['sshkey1, 'sshkey2', ...]}.
"""
instance_data, project_data = self._GetInstanceAndProjectAttributes(
metadata_dict)
valid_keys = [instance_data.get('sshKeys'), instance_data.get('ssh-keys')]
block_project = instance_data.get('block-project-ssh-keys', '').lower()
if block_project != 'true' and not instance_data.get('sshKeys'):
valid_keys.append(project_data.get('ssh-keys'))
valid_keys.append(project_data.get('sshKeys'))
accounts_data = '\n'.join([key for key in valid_keys if key])
return self._ParseAccountsData(accounts_data) | [
"def",
"_GetAccountsData",
"(",
"self",
",",
"metadata_dict",
")",
":",
"instance_data",
",",
"project_data",
"=",
"self",
".",
"_GetInstanceAndProjectAttributes",
"(",
"metadata_dict",
")",
"valid_keys",
"=",
"[",
"instance_data",
".",
"get",
"(",
"'sshKeys'",
")... | Get the user accounts specified in metadata server contents.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
dict, a mapping of the form: {'username': ['sshkey1, 'sshkey2', ...]}. | [
"Get",
"the",
"user",
"accounts",
"specified",
"in",
"metadata",
"server",
"contents",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py#L186-L203 | train | 229,362 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py | AccountsDaemon._UpdateUsers | def _UpdateUsers(self, update_users):
"""Provision and update Linux user accounts based on account metadata.
Args:
update_users: dict, authorized users mapped to their public SSH keys.
"""
for user, ssh_keys in update_users.items():
if not user or user in self.invalid_users:
continue
configured_keys = self.user_ssh_keys.get(user, [])
if set(ssh_keys) != set(configured_keys):
if not self.utils.UpdateUser(user, ssh_keys):
self.invalid_users.add(user)
else:
self.user_ssh_keys[user] = ssh_keys[:] | python | def _UpdateUsers(self, update_users):
"""Provision and update Linux user accounts based on account metadata.
Args:
update_users: dict, authorized users mapped to their public SSH keys.
"""
for user, ssh_keys in update_users.items():
if not user or user in self.invalid_users:
continue
configured_keys = self.user_ssh_keys.get(user, [])
if set(ssh_keys) != set(configured_keys):
if not self.utils.UpdateUser(user, ssh_keys):
self.invalid_users.add(user)
else:
self.user_ssh_keys[user] = ssh_keys[:] | [
"def",
"_UpdateUsers",
"(",
"self",
",",
"update_users",
")",
":",
"for",
"user",
",",
"ssh_keys",
"in",
"update_users",
".",
"items",
"(",
")",
":",
"if",
"not",
"user",
"or",
"user",
"in",
"self",
".",
"invalid_users",
":",
"continue",
"configured_keys",... | Provision and update Linux user accounts based on account metadata.
Args:
update_users: dict, authorized users mapped to their public SSH keys. | [
"Provision",
"and",
"update",
"Linux",
"user",
"accounts",
"based",
"on",
"account",
"metadata",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py#L205-L219 | train | 229,363 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py | AccountsDaemon._RemoveUsers | def _RemoveUsers(self, remove_users):
"""Deprovision Linux user accounts that do not appear in account metadata.
Args:
remove_users: list, the username strings of the Linux accounts to remove.
"""
for username in remove_users:
self.utils.RemoveUser(username)
self.user_ssh_keys.pop(username, None)
self.invalid_users -= set(remove_users) | python | def _RemoveUsers(self, remove_users):
"""Deprovision Linux user accounts that do not appear in account metadata.
Args:
remove_users: list, the username strings of the Linux accounts to remove.
"""
for username in remove_users:
self.utils.RemoveUser(username)
self.user_ssh_keys.pop(username, None)
self.invalid_users -= set(remove_users) | [
"def",
"_RemoveUsers",
"(",
"self",
",",
"remove_users",
")",
":",
"for",
"username",
"in",
"remove_users",
":",
"self",
".",
"utils",
".",
"RemoveUser",
"(",
"username",
")",
"self",
".",
"user_ssh_keys",
".",
"pop",
"(",
"username",
",",
"None",
")",
"... | Deprovision Linux user accounts that do not appear in account metadata.
Args:
remove_users: list, the username strings of the Linux accounts to remove. | [
"Deprovision",
"Linux",
"user",
"accounts",
"that",
"do",
"not",
"appear",
"in",
"account",
"metadata",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py#L221-L230 | train | 229,364 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py | AccountsDaemon._GetEnableOsLoginValue | def _GetEnableOsLoginValue(self, metadata_dict):
"""Get the value of the enable-oslogin metadata key.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
bool, True if OS Login is enabled for VM access.
"""
instance_data, project_data = self._GetInstanceAndProjectAttributes(
metadata_dict)
instance_value = instance_data.get('enable-oslogin')
project_value = project_data.get('enable-oslogin')
value = instance_value or project_value or ''
return value.lower() == 'true' | python | def _GetEnableOsLoginValue(self, metadata_dict):
"""Get the value of the enable-oslogin metadata key.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
bool, True if OS Login is enabled for VM access.
"""
instance_data, project_data = self._GetInstanceAndProjectAttributes(
metadata_dict)
instance_value = instance_data.get('enable-oslogin')
project_value = project_data.get('enable-oslogin')
value = instance_value or project_value or ''
return value.lower() == 'true' | [
"def",
"_GetEnableOsLoginValue",
"(",
"self",
",",
"metadata_dict",
")",
":",
"instance_data",
",",
"project_data",
"=",
"self",
".",
"_GetInstanceAndProjectAttributes",
"(",
"metadata_dict",
")",
"instance_value",
"=",
"instance_data",
".",
"get",
"(",
"'enable-oslog... | Get the value of the enable-oslogin metadata key.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
bool, True if OS Login is enabled for VM access. | [
"Get",
"the",
"value",
"of",
"the",
"enable",
"-",
"oslogin",
"metadata",
"key",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py#L232-L247 | train | 229,365 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py | AccountsDaemon.HandleAccounts | def HandleAccounts(self, result):
"""Called when there are changes to the contents of the metadata server.
Args:
result: json, the deserialized contents of the metadata server.
"""
self.logger.debug('Checking for changes to user accounts.')
configured_users = self.utils.GetConfiguredUsers()
enable_oslogin = self._GetEnableOsLoginValue(result)
enable_two_factor = self._GetEnableTwoFactorValue(result)
if enable_oslogin:
desired_users = {}
self.oslogin.UpdateOsLogin(True, two_factor_desired=enable_two_factor)
else:
desired_users = self._GetAccountsData(result)
self.oslogin.UpdateOsLogin(False)
remove_users = sorted(set(configured_users) - set(desired_users.keys()))
self._UpdateUsers(desired_users)
self._RemoveUsers(remove_users)
self.utils.SetConfiguredUsers(desired_users.keys()) | python | def HandleAccounts(self, result):
"""Called when there are changes to the contents of the metadata server.
Args:
result: json, the deserialized contents of the metadata server.
"""
self.logger.debug('Checking for changes to user accounts.')
configured_users = self.utils.GetConfiguredUsers()
enable_oslogin = self._GetEnableOsLoginValue(result)
enable_two_factor = self._GetEnableTwoFactorValue(result)
if enable_oslogin:
desired_users = {}
self.oslogin.UpdateOsLogin(True, two_factor_desired=enable_two_factor)
else:
desired_users = self._GetAccountsData(result)
self.oslogin.UpdateOsLogin(False)
remove_users = sorted(set(configured_users) - set(desired_users.keys()))
self._UpdateUsers(desired_users)
self._RemoveUsers(remove_users)
self.utils.SetConfiguredUsers(desired_users.keys()) | [
"def",
"HandleAccounts",
"(",
"self",
",",
"result",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Checking for changes to user accounts.'",
")",
"configured_users",
"=",
"self",
".",
"utils",
".",
"GetConfiguredUsers",
"(",
")",
"enable_oslogin",
"=",
"... | Called when there are changes to the contents of the metadata server.
Args:
result: json, the deserialized contents of the metadata server. | [
"Called",
"when",
"there",
"are",
"changes",
"to",
"the",
"contents",
"of",
"the",
"metadata",
"server",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/accounts/accounts_daemon.py#L266-L285 | train | 229,366 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/file_utils.py | _SetSELinuxContext | def _SetSELinuxContext(path):
"""Set the appropriate SELinux context, if SELinux tools are installed.
Calls /sbin/restorecon on the provided path to set the SELinux context as
specified by policy. This call does not operate recursively.
Only some OS configurations use SELinux. It is therefore acceptable for
restorecon to be missing, in which case we do nothing.
Args:
path: string, the path on which to fix the SELinux context.
"""
restorecon = '/sbin/restorecon'
if os.path.isfile(restorecon) and os.access(restorecon, os.X_OK):
subprocess.call([restorecon, path]) | python | def _SetSELinuxContext(path):
"""Set the appropriate SELinux context, if SELinux tools are installed.
Calls /sbin/restorecon on the provided path to set the SELinux context as
specified by policy. This call does not operate recursively.
Only some OS configurations use SELinux. It is therefore acceptable for
restorecon to be missing, in which case we do nothing.
Args:
path: string, the path on which to fix the SELinux context.
"""
restorecon = '/sbin/restorecon'
if os.path.isfile(restorecon) and os.access(restorecon, os.X_OK):
subprocess.call([restorecon, path]) | [
"def",
"_SetSELinuxContext",
"(",
"path",
")",
":",
"restorecon",
"=",
"'/sbin/restorecon'",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"restorecon",
")",
"and",
"os",
".",
"access",
"(",
"restorecon",
",",
"os",
".",
"X_OK",
")",
":",
"subprocess",
".... | Set the appropriate SELinux context, if SELinux tools are installed.
Calls /sbin/restorecon on the provided path to set the SELinux context as
specified by policy. This call does not operate recursively.
Only some OS configurations use SELinux. It is therefore acceptable for
restorecon to be missing, in which case we do nothing.
Args:
path: string, the path on which to fix the SELinux context. | [
"Set",
"the",
"appropriate",
"SELinux",
"context",
"if",
"SELinux",
"tools",
"are",
"installed",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/file_utils.py#L25-L39 | train | 229,367 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/file_utils.py | SetPermissions | def SetPermissions(path, mode=None, uid=None, gid=None, mkdir=False):
"""Set the permissions and ownership of a path.
Args:
path: string, the path for which owner ID and group ID needs to be setup.
mode: octal string, the permissions to set on the path.
uid: int, the owner ID to be set for the path.
gid: int, the group ID to be set for the path.
mkdir: bool, True if the directory needs to be created.
"""
if mkdir and not os.path.exists(path):
os.mkdir(path, mode or 0o777)
elif mode:
os.chmod(path, mode)
if uid and gid:
os.chown(path, uid, gid)
_SetSELinuxContext(path) | python | def SetPermissions(path, mode=None, uid=None, gid=None, mkdir=False):
"""Set the permissions and ownership of a path.
Args:
path: string, the path for which owner ID and group ID needs to be setup.
mode: octal string, the permissions to set on the path.
uid: int, the owner ID to be set for the path.
gid: int, the group ID to be set for the path.
mkdir: bool, True if the directory needs to be created.
"""
if mkdir and not os.path.exists(path):
os.mkdir(path, mode or 0o777)
elif mode:
os.chmod(path, mode)
if uid and gid:
os.chown(path, uid, gid)
_SetSELinuxContext(path) | [
"def",
"SetPermissions",
"(",
"path",
",",
"mode",
"=",
"None",
",",
"uid",
"=",
"None",
",",
"gid",
"=",
"None",
",",
"mkdir",
"=",
"False",
")",
":",
"if",
"mkdir",
"and",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"os",
... | Set the permissions and ownership of a path.
Args:
path: string, the path for which owner ID and group ID needs to be setup.
mode: octal string, the permissions to set on the path.
uid: int, the owner ID to be set for the path.
gid: int, the group ID to be set for the path.
mkdir: bool, True if the directory needs to be created. | [
"Set",
"the",
"permissions",
"and",
"ownership",
"of",
"a",
"path",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/file_utils.py#L42-L58 | train | 229,368 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/file_utils.py | Lock | def Lock(fd, path, blocking):
"""Lock the provided file descriptor.
Args:
fd: int, the file descriptor of the file to lock.
path: string, the name of the file to lock.
blocking: bool, whether the function should return immediately.
Raises:
IOError, raised from flock while attempting to lock a file.
"""
operation = fcntl.LOCK_EX if blocking else fcntl.LOCK_EX | fcntl.LOCK_NB
try:
fcntl.flock(fd, operation)
except IOError as e:
if e.errno == errno.EWOULDBLOCK:
raise IOError('Exception locking %s. File already locked.' % path)
else:
raise IOError('Exception locking %s. %s.' % (path, str(e))) | python | def Lock(fd, path, blocking):
"""Lock the provided file descriptor.
Args:
fd: int, the file descriptor of the file to lock.
path: string, the name of the file to lock.
blocking: bool, whether the function should return immediately.
Raises:
IOError, raised from flock while attempting to lock a file.
"""
operation = fcntl.LOCK_EX if blocking else fcntl.LOCK_EX | fcntl.LOCK_NB
try:
fcntl.flock(fd, operation)
except IOError as e:
if e.errno == errno.EWOULDBLOCK:
raise IOError('Exception locking %s. File already locked.' % path)
else:
raise IOError('Exception locking %s. %s.' % (path, str(e))) | [
"def",
"Lock",
"(",
"fd",
",",
"path",
",",
"blocking",
")",
":",
"operation",
"=",
"fcntl",
".",
"LOCK_EX",
"if",
"blocking",
"else",
"fcntl",
".",
"LOCK_EX",
"|",
"fcntl",
".",
"LOCK_NB",
"try",
":",
"fcntl",
".",
"flock",
"(",
"fd",
",",
"operatio... | Lock the provided file descriptor.
Args:
fd: int, the file descriptor of the file to lock.
path: string, the name of the file to lock.
blocking: bool, whether the function should return immediately.
Raises:
IOError, raised from flock while attempting to lock a file. | [
"Lock",
"the",
"provided",
"file",
"descriptor",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/file_utils.py#L61-L79 | train | 229,369 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/file_utils.py | Unlock | def Unlock(fd, path):
"""Release the lock on the file.
Args:
fd: int, the file descriptor of the file to unlock.
path: string, the name of the file to lock.
Raises:
IOError, raised from flock while attempting to release a file lock.
"""
try:
fcntl.flock(fd, fcntl.LOCK_UN | fcntl.LOCK_NB)
except IOError as e:
if e.errno == errno.EWOULDBLOCK:
raise IOError('Exception unlocking %s. Locked by another process.' % path)
else:
raise IOError('Exception unlocking %s. %s.' % (path, str(e))) | python | def Unlock(fd, path):
"""Release the lock on the file.
Args:
fd: int, the file descriptor of the file to unlock.
path: string, the name of the file to lock.
Raises:
IOError, raised from flock while attempting to release a file lock.
"""
try:
fcntl.flock(fd, fcntl.LOCK_UN | fcntl.LOCK_NB)
except IOError as e:
if e.errno == errno.EWOULDBLOCK:
raise IOError('Exception unlocking %s. Locked by another process.' % path)
else:
raise IOError('Exception unlocking %s. %s.' % (path, str(e))) | [
"def",
"Unlock",
"(",
"fd",
",",
"path",
")",
":",
"try",
":",
"fcntl",
".",
"flock",
"(",
"fd",
",",
"fcntl",
".",
"LOCK_UN",
"|",
"fcntl",
".",
"LOCK_NB",
")",
"except",
"IOError",
"as",
"e",
":",
"if",
"e",
".",
"errno",
"==",
"errno",
".",
... | Release the lock on the file.
Args:
fd: int, the file descriptor of the file to unlock.
path: string, the name of the file to lock.
Raises:
IOError, raised from flock while attempting to release a file lock. | [
"Release",
"the",
"lock",
"on",
"the",
"file",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/file_utils.py#L82-L98 | train | 229,370 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/file_utils.py | LockFile | def LockFile(path, blocking=False):
"""Interface to flock-based file locking to prevent concurrent executions.
Args:
path: string, the name of the file to lock.
blocking: bool, whether the function should return immediately.
Yields:
None, yields when a lock on the file is obtained.
Raises:
IOError, raised from flock locking operations on a file.
OSError, raised from file operations.
"""
fd = os.open(path, os.O_CREAT)
try:
Lock(fd, path, blocking)
yield
finally:
try:
Unlock(fd, path)
finally:
os.close(fd) | python | def LockFile(path, blocking=False):
"""Interface to flock-based file locking to prevent concurrent executions.
Args:
path: string, the name of the file to lock.
blocking: bool, whether the function should return immediately.
Yields:
None, yields when a lock on the file is obtained.
Raises:
IOError, raised from flock locking operations on a file.
OSError, raised from file operations.
"""
fd = os.open(path, os.O_CREAT)
try:
Lock(fd, path, blocking)
yield
finally:
try:
Unlock(fd, path)
finally:
os.close(fd) | [
"def",
"LockFile",
"(",
"path",
",",
"blocking",
"=",
"False",
")",
":",
"fd",
"=",
"os",
".",
"open",
"(",
"path",
",",
"os",
".",
"O_CREAT",
")",
"try",
":",
"Lock",
"(",
"fd",
",",
"path",
",",
"blocking",
")",
"yield",
"finally",
":",
"try",
... | Interface to flock-based file locking to prevent concurrent executions.
Args:
path: string, the name of the file to lock.
blocking: bool, whether the function should return immediately.
Yields:
None, yields when a lock on the file is obtained.
Raises:
IOError, raised from flock locking operations on a file.
OSError, raised from file operations. | [
"Interface",
"to",
"flock",
"-",
"based",
"file",
"locking",
"to",
"prevent",
"concurrent",
"executions",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/file_utils.py#L102-L124 | train | 229,371 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py | RetryOnUnavailable | def RetryOnUnavailable(func):
"""Function decorator to retry on a service unavailable exception."""
@functools.wraps(func)
def Wrapper(*args, **kwargs):
while True:
try:
response = func(*args, **kwargs)
except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
time.sleep(5)
if (isinstance(e, urlerror.HTTPError)
and e.getcode() == httpclient.SERVICE_UNAVAILABLE):
continue
elif isinstance(e, socket.timeout):
continue
raise
else:
if response.getcode() == httpclient.OK:
return response
else:
raise StatusException(response)
return Wrapper | python | def RetryOnUnavailable(func):
"""Function decorator to retry on a service unavailable exception."""
@functools.wraps(func)
def Wrapper(*args, **kwargs):
while True:
try:
response = func(*args, **kwargs)
except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
time.sleep(5)
if (isinstance(e, urlerror.HTTPError)
and e.getcode() == httpclient.SERVICE_UNAVAILABLE):
continue
elif isinstance(e, socket.timeout):
continue
raise
else:
if response.getcode() == httpclient.OK:
return response
else:
raise StatusException(response)
return Wrapper | [
"def",
"RetryOnUnavailable",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"Wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"while",
"True",
":",
"try",
":",
"response",
"=",
"func",
"(",
"*",
"args"... | Function decorator to retry on a service unavailable exception. | [
"Function",
"decorator",
"to",
"retry",
"on",
"a",
"service",
"unavailable",
"exception",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py#L43-L64 | train | 229,372 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py | MetadataWatcher._GetMetadataRequest | def _GetMetadataRequest(self, metadata_url, params=None, timeout=None):
"""Performs a GET request with the metadata headers.
Args:
metadata_url: string, the URL to perform a GET request on.
params: dictionary, the query parameters in the GET request.
timeout: int, timeout in seconds for metadata requests.
Returns:
HTTP response from the GET request.
Raises:
urlerror.HTTPError: raises when the GET request fails.
"""
headers = {'Metadata-Flavor': 'Google'}
params = urlparse.urlencode(params or {})
url = '%s?%s' % (metadata_url, params)
request = urlrequest.Request(url, headers=headers)
request_opener = urlrequest.build_opener(urlrequest.ProxyHandler({}))
timeout = timeout or self.timeout
return request_opener.open(request, timeout=timeout*1.1) | python | def _GetMetadataRequest(self, metadata_url, params=None, timeout=None):
"""Performs a GET request with the metadata headers.
Args:
metadata_url: string, the URL to perform a GET request on.
params: dictionary, the query parameters in the GET request.
timeout: int, timeout in seconds for metadata requests.
Returns:
HTTP response from the GET request.
Raises:
urlerror.HTTPError: raises when the GET request fails.
"""
headers = {'Metadata-Flavor': 'Google'}
params = urlparse.urlencode(params or {})
url = '%s?%s' % (metadata_url, params)
request = urlrequest.Request(url, headers=headers)
request_opener = urlrequest.build_opener(urlrequest.ProxyHandler({}))
timeout = timeout or self.timeout
return request_opener.open(request, timeout=timeout*1.1) | [
"def",
"_GetMetadataRequest",
"(",
"self",
",",
"metadata_url",
",",
"params",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"headers",
"=",
"{",
"'Metadata-Flavor'",
":",
"'Google'",
"}",
"params",
"=",
"urlparse",
".",
"urlencode",
"(",
"params",
"... | Performs a GET request with the metadata headers.
Args:
metadata_url: string, the URL to perform a GET request on.
params: dictionary, the query parameters in the GET request.
timeout: int, timeout in seconds for metadata requests.
Returns:
HTTP response from the GET request.
Raises:
urlerror.HTTPError: raises when the GET request fails. | [
"Performs",
"a",
"GET",
"request",
"with",
"the",
"metadata",
"headers",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py#L82-L102 | train | 229,373 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py | MetadataWatcher._UpdateEtag | def _UpdateEtag(self, response):
"""Update the etag from an API response.
Args:
response: HTTP response with a header field.
Returns:
bool, True if the etag in the response header updated.
"""
etag = response.headers.get('etag', self.etag)
etag_updated = self.etag != etag
self.etag = etag
return etag_updated | python | def _UpdateEtag(self, response):
"""Update the etag from an API response.
Args:
response: HTTP response with a header field.
Returns:
bool, True if the etag in the response header updated.
"""
etag = response.headers.get('etag', self.etag)
etag_updated = self.etag != etag
self.etag = etag
return etag_updated | [
"def",
"_UpdateEtag",
"(",
"self",
",",
"response",
")",
":",
"etag",
"=",
"response",
".",
"headers",
".",
"get",
"(",
"'etag'",
",",
"self",
".",
"etag",
")",
"etag_updated",
"=",
"self",
".",
"etag",
"!=",
"etag",
"self",
".",
"etag",
"=",
"etag",... | Update the etag from an API response.
Args:
response: HTTP response with a header field.
Returns:
bool, True if the etag in the response header updated. | [
"Update",
"the",
"etag",
"from",
"an",
"API",
"response",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py#L104-L116 | train | 229,374 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py | MetadataWatcher._GetMetadataUpdate | def _GetMetadataUpdate(
self, metadata_key='', recursive=True, wait=True, timeout=None):
"""Request the contents of metadata server and deserialize the response.
Args:
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
wait: bool, True if we should wait for a metadata change.
timeout: int, timeout in seconds for returning metadata output.
Returns:
json, the deserialized contents of the metadata server.
"""
metadata_key = os.path.join(metadata_key, '') if recursive else metadata_key
metadata_url = os.path.join(METADATA_SERVER, metadata_key)
params = {
'alt': 'json',
'last_etag': self.etag,
'recursive': recursive,
'timeout_sec': timeout or self.timeout,
'wait_for_change': wait,
}
while True:
response = self._GetMetadataRequest(
metadata_url, params=params, timeout=timeout)
etag_updated = self._UpdateEtag(response)
if wait and not etag_updated and not timeout:
# Retry until the etag is updated.
continue
else:
# One of the following are true:
# - Waiting for change is not required.
# - The etag is updated.
# - The user specified a request timeout.
break
return json.loads(response.read().decode('utf-8')) | python | def _GetMetadataUpdate(
self, metadata_key='', recursive=True, wait=True, timeout=None):
"""Request the contents of metadata server and deserialize the response.
Args:
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
wait: bool, True if we should wait for a metadata change.
timeout: int, timeout in seconds for returning metadata output.
Returns:
json, the deserialized contents of the metadata server.
"""
metadata_key = os.path.join(metadata_key, '') if recursive else metadata_key
metadata_url = os.path.join(METADATA_SERVER, metadata_key)
params = {
'alt': 'json',
'last_etag': self.etag,
'recursive': recursive,
'timeout_sec': timeout or self.timeout,
'wait_for_change': wait,
}
while True:
response = self._GetMetadataRequest(
metadata_url, params=params, timeout=timeout)
etag_updated = self._UpdateEtag(response)
if wait and not etag_updated and not timeout:
# Retry until the etag is updated.
continue
else:
# One of the following are true:
# - Waiting for change is not required.
# - The etag is updated.
# - The user specified a request timeout.
break
return json.loads(response.read().decode('utf-8')) | [
"def",
"_GetMetadataUpdate",
"(",
"self",
",",
"metadata_key",
"=",
"''",
",",
"recursive",
"=",
"True",
",",
"wait",
"=",
"True",
",",
"timeout",
"=",
"None",
")",
":",
"metadata_key",
"=",
"os",
".",
"path",
".",
"join",
"(",
"metadata_key",
",",
"''... | Request the contents of metadata server and deserialize the response.
Args:
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
wait: bool, True if we should wait for a metadata change.
timeout: int, timeout in seconds for returning metadata output.
Returns:
json, the deserialized contents of the metadata server. | [
"Request",
"the",
"contents",
"of",
"metadata",
"server",
"and",
"deserialize",
"the",
"response",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py#L118-L153 | train | 229,375 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py | MetadataWatcher._HandleMetadataUpdate | def _HandleMetadataUpdate(
self, metadata_key='', recursive=True, wait=True, timeout=None,
retry=True):
"""Wait for a successful metadata response.
Args:
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
wait: bool, True if we should wait for a metadata change.
timeout: int, timeout in seconds for returning metadata output.
retry: bool, True if we should retry on failure.
Returns:
json, the deserialized contents of the metadata server.
"""
exception = None
while True:
try:
return self._GetMetadataUpdate(
metadata_key=metadata_key, recursive=recursive, wait=wait,
timeout=timeout)
except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
if not isinstance(e, type(exception)):
exception = e
self.logger.error('GET request error retrieving metadata. %s.', e)
if retry:
continue
else:
break | python | def _HandleMetadataUpdate(
self, metadata_key='', recursive=True, wait=True, timeout=None,
retry=True):
"""Wait for a successful metadata response.
Args:
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
wait: bool, True if we should wait for a metadata change.
timeout: int, timeout in seconds for returning metadata output.
retry: bool, True if we should retry on failure.
Returns:
json, the deserialized contents of the metadata server.
"""
exception = None
while True:
try:
return self._GetMetadataUpdate(
metadata_key=metadata_key, recursive=recursive, wait=wait,
timeout=timeout)
except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
if not isinstance(e, type(exception)):
exception = e
self.logger.error('GET request error retrieving metadata. %s.', e)
if retry:
continue
else:
break | [
"def",
"_HandleMetadataUpdate",
"(",
"self",
",",
"metadata_key",
"=",
"''",
",",
"recursive",
"=",
"True",
",",
"wait",
"=",
"True",
",",
"timeout",
"=",
"None",
",",
"retry",
"=",
"True",
")",
":",
"exception",
"=",
"None",
"while",
"True",
":",
"try... | Wait for a successful metadata response.
Args:
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
wait: bool, True if we should wait for a metadata change.
timeout: int, timeout in seconds for returning metadata output.
retry: bool, True if we should retry on failure.
Returns:
json, the deserialized contents of the metadata server. | [
"Wait",
"for",
"a",
"successful",
"metadata",
"response",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py#L155-L183 | train | 229,376 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py | MetadataWatcher.WatchMetadata | def WatchMetadata(
self, handler, metadata_key='', recursive=True, timeout=None):
"""Watch for changes to the contents of the metadata server.
Args:
handler: callable, a function to call with the updated metadata contents.
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
timeout: int, timeout in seconds for returning metadata output.
"""
while True:
response = self._HandleMetadataUpdate(
metadata_key=metadata_key, recursive=recursive, wait=True,
timeout=timeout)
try:
handler(response)
except Exception as e:
self.logger.exception('Exception calling the response handler. %s.', e) | python | def WatchMetadata(
self, handler, metadata_key='', recursive=True, timeout=None):
"""Watch for changes to the contents of the metadata server.
Args:
handler: callable, a function to call with the updated metadata contents.
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
timeout: int, timeout in seconds for returning metadata output.
"""
while True:
response = self._HandleMetadataUpdate(
metadata_key=metadata_key, recursive=recursive, wait=True,
timeout=timeout)
try:
handler(response)
except Exception as e:
self.logger.exception('Exception calling the response handler. %s.', e) | [
"def",
"WatchMetadata",
"(",
"self",
",",
"handler",
",",
"metadata_key",
"=",
"''",
",",
"recursive",
"=",
"True",
",",
"timeout",
"=",
"None",
")",
":",
"while",
"True",
":",
"response",
"=",
"self",
".",
"_HandleMetadataUpdate",
"(",
"metadata_key",
"="... | Watch for changes to the contents of the metadata server.
Args:
handler: callable, a function to call with the updated metadata contents.
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
timeout: int, timeout in seconds for returning metadata output. | [
"Watch",
"for",
"changes",
"to",
"the",
"contents",
"of",
"the",
"metadata",
"server",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py#L185-L202 | train | 229,377 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py | MetadataWatcher.GetMetadata | def GetMetadata(
self, metadata_key='', recursive=True, timeout=None, retry=True):
"""Retrieve the contents of metadata server for a metadata key.
Args:
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
timeout: int, timeout in seconds for returning metadata output.
retry: bool, True if we should retry on failure.
Returns:
json, the deserialized contents of the metadata server or None if error.
"""
return self._HandleMetadataUpdate(
metadata_key=metadata_key, recursive=recursive, wait=False,
timeout=timeout, retry=retry) | python | def GetMetadata(
self, metadata_key='', recursive=True, timeout=None, retry=True):
"""Retrieve the contents of metadata server for a metadata key.
Args:
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
timeout: int, timeout in seconds for returning metadata output.
retry: bool, True if we should retry on failure.
Returns:
json, the deserialized contents of the metadata server or None if error.
"""
return self._HandleMetadataUpdate(
metadata_key=metadata_key, recursive=recursive, wait=False,
timeout=timeout, retry=retry) | [
"def",
"GetMetadata",
"(",
"self",
",",
"metadata_key",
"=",
"''",
",",
"recursive",
"=",
"True",
",",
"timeout",
"=",
"None",
",",
"retry",
"=",
"True",
")",
":",
"return",
"self",
".",
"_HandleMetadataUpdate",
"(",
"metadata_key",
"=",
"metadata_key",
",... | Retrieve the contents of metadata server for a metadata key.
Args:
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
timeout: int, timeout in seconds for returning metadata output.
retry: bool, True if we should retry on failure.
Returns:
json, the deserialized contents of the metadata server or None if error. | [
"Retrieve",
"the",
"contents",
"of",
"metadata",
"server",
"for",
"a",
"metadata",
"key",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/metadata_watcher.py#L204-L219 | train | 229,378 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/networking/ip_forwarding/ip_forwarding.py | IpForwarding._LogForwardedIpChanges | def _LogForwardedIpChanges(
self, configured, desired, to_add, to_remove, interface):
"""Log the planned IP address changes.
Args:
configured: list, the IP address strings already configured.
desired: list, the IP address strings that will be configured.
to_add: list, the forwarded IP address strings to configure.
to_remove: list, the forwarded IP address strings to delete.
interface: string, the output device to modify.
"""
if not to_add and not to_remove:
return
self.logger.info(
'Changing %s IPs from %s to %s by adding %s and removing %s.',
interface, configured or None, desired or None, to_add or None,
to_remove or None) | python | def _LogForwardedIpChanges(
self, configured, desired, to_add, to_remove, interface):
"""Log the planned IP address changes.
Args:
configured: list, the IP address strings already configured.
desired: list, the IP address strings that will be configured.
to_add: list, the forwarded IP address strings to configure.
to_remove: list, the forwarded IP address strings to delete.
interface: string, the output device to modify.
"""
if not to_add and not to_remove:
return
self.logger.info(
'Changing %s IPs from %s to %s by adding %s and removing %s.',
interface, configured or None, desired or None, to_add or None,
to_remove or None) | [
"def",
"_LogForwardedIpChanges",
"(",
"self",
",",
"configured",
",",
"desired",
",",
"to_add",
",",
"to_remove",
",",
"interface",
")",
":",
"if",
"not",
"to_add",
"and",
"not",
"to_remove",
":",
"return",
"self",
".",
"logger",
".",
"info",
"(",
"'Changi... | Log the planned IP address changes.
Args:
configured: list, the IP address strings already configured.
desired: list, the IP address strings that will be configured.
to_add: list, the forwarded IP address strings to configure.
to_remove: list, the forwarded IP address strings to delete.
interface: string, the output device to modify. | [
"Log",
"the",
"planned",
"IP",
"address",
"changes",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/networking/ip_forwarding/ip_forwarding.py#L45-L61 | train | 229,379 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/networking/ip_forwarding/ip_forwarding.py | IpForwarding._AddForwardedIps | def _AddForwardedIps(self, forwarded_ips, interface):
"""Configure the forwarded IP address on the network interface.
Args:
forwarded_ips: list, the forwarded IP address strings to configure.
interface: string, the output device to use.
"""
for address in forwarded_ips:
self.ip_forwarding_utils.AddForwardedIp(address, interface) | python | def _AddForwardedIps(self, forwarded_ips, interface):
"""Configure the forwarded IP address on the network interface.
Args:
forwarded_ips: list, the forwarded IP address strings to configure.
interface: string, the output device to use.
"""
for address in forwarded_ips:
self.ip_forwarding_utils.AddForwardedIp(address, interface) | [
"def",
"_AddForwardedIps",
"(",
"self",
",",
"forwarded_ips",
",",
"interface",
")",
":",
"for",
"address",
"in",
"forwarded_ips",
":",
"self",
".",
"ip_forwarding_utils",
".",
"AddForwardedIp",
"(",
"address",
",",
"interface",
")"
] | Configure the forwarded IP address on the network interface.
Args:
forwarded_ips: list, the forwarded IP address strings to configure.
interface: string, the output device to use. | [
"Configure",
"the",
"forwarded",
"IP",
"address",
"on",
"the",
"network",
"interface",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/networking/ip_forwarding/ip_forwarding.py#L63-L71 | train | 229,380 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/networking/ip_forwarding/ip_forwarding.py | IpForwarding._RemoveForwardedIps | def _RemoveForwardedIps(self, forwarded_ips, interface):
"""Remove the forwarded IP addresses from the network interface.
Args:
forwarded_ips: list, the forwarded IP address strings to delete.
interface: string, the output device to use.
"""
for address in forwarded_ips:
self.ip_forwarding_utils.RemoveForwardedIp(address, interface) | python | def _RemoveForwardedIps(self, forwarded_ips, interface):
"""Remove the forwarded IP addresses from the network interface.
Args:
forwarded_ips: list, the forwarded IP address strings to delete.
interface: string, the output device to use.
"""
for address in forwarded_ips:
self.ip_forwarding_utils.RemoveForwardedIp(address, interface) | [
"def",
"_RemoveForwardedIps",
"(",
"self",
",",
"forwarded_ips",
",",
"interface",
")",
":",
"for",
"address",
"in",
"forwarded_ips",
":",
"self",
".",
"ip_forwarding_utils",
".",
"RemoveForwardedIp",
"(",
"address",
",",
"interface",
")"
] | Remove the forwarded IP addresses from the network interface.
Args:
forwarded_ips: list, the forwarded IP address strings to delete.
interface: string, the output device to use. | [
"Remove",
"the",
"forwarded",
"IP",
"addresses",
"from",
"the",
"network",
"interface",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/networking/ip_forwarding/ip_forwarding.py#L73-L81 | train | 229,381 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/networking/ip_forwarding/ip_forwarding.py | IpForwarding.HandleForwardedIps | def HandleForwardedIps(self, interface, forwarded_ips, interface_ip=None):
"""Handle changes to the forwarded IPs on a network interface.
Args:
interface: string, the output device to configure.
forwarded_ips: list, the forwarded IP address strings desired.
interface_ip: string, current interface ip address.
"""
desired = self.ip_forwarding_utils.ParseForwardedIps(forwarded_ips)
configured = self.ip_forwarding_utils.GetForwardedIps(
interface, interface_ip)
to_add = sorted(set(desired) - set(configured))
to_remove = sorted(set(configured) - set(desired))
self._LogForwardedIpChanges(
configured, desired, to_add, to_remove, interface)
self._AddForwardedIps(to_add, interface)
self._RemoveForwardedIps(to_remove, interface) | python | def HandleForwardedIps(self, interface, forwarded_ips, interface_ip=None):
"""Handle changes to the forwarded IPs on a network interface.
Args:
interface: string, the output device to configure.
forwarded_ips: list, the forwarded IP address strings desired.
interface_ip: string, current interface ip address.
"""
desired = self.ip_forwarding_utils.ParseForwardedIps(forwarded_ips)
configured = self.ip_forwarding_utils.GetForwardedIps(
interface, interface_ip)
to_add = sorted(set(desired) - set(configured))
to_remove = sorted(set(configured) - set(desired))
self._LogForwardedIpChanges(
configured, desired, to_add, to_remove, interface)
self._AddForwardedIps(to_add, interface)
self._RemoveForwardedIps(to_remove, interface) | [
"def",
"HandleForwardedIps",
"(",
"self",
",",
"interface",
",",
"forwarded_ips",
",",
"interface_ip",
"=",
"None",
")",
":",
"desired",
"=",
"self",
".",
"ip_forwarding_utils",
".",
"ParseForwardedIps",
"(",
"forwarded_ips",
")",
"configured",
"=",
"self",
".",... | Handle changes to the forwarded IPs on a network interface.
Args:
interface: string, the output device to configure.
forwarded_ips: list, the forwarded IP address strings desired.
interface_ip: string, current interface ip address. | [
"Handle",
"changes",
"to",
"the",
"forwarded",
"IPs",
"on",
"a",
"network",
"interface",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/networking/ip_forwarding/ip_forwarding.py#L83-L99 | train | 229,382 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/sles_12/utils.py | Utils._WriteIfcfg | def _WriteIfcfg(self, interfaces, logger):
"""Write ifcfg files for multi-NIC support.
Overwrites the files. This allows us to update ifcfg-* in the future.
Disable the network setup to override this behavior and customize the
configurations.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port.
"""
for interface in interfaces:
interface_config = os.path.join(
self.network_path, 'ifcfg-%s' % interface)
interface_content = [
'# Added by Google.',
'STARTMODE=hotplug',
'BOOTPROTO=dhcp',
'DHCLIENT_SET_DEFAULT_ROUTE=yes',
'DHCLIENT_ROUTE_PRIORITY=10%s00' % interface,
'',
]
with open(interface_config, 'w') as interface_file:
interface_file.write('\n'.join(interface_content))
logger.info('Created ifcfg file for interface %s.', interface) | python | def _WriteIfcfg(self, interfaces, logger):
"""Write ifcfg files for multi-NIC support.
Overwrites the files. This allows us to update ifcfg-* in the future.
Disable the network setup to override this behavior and customize the
configurations.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port.
"""
for interface in interfaces:
interface_config = os.path.join(
self.network_path, 'ifcfg-%s' % interface)
interface_content = [
'# Added by Google.',
'STARTMODE=hotplug',
'BOOTPROTO=dhcp',
'DHCLIENT_SET_DEFAULT_ROUTE=yes',
'DHCLIENT_ROUTE_PRIORITY=10%s00' % interface,
'',
]
with open(interface_config, 'w') as interface_file:
interface_file.write('\n'.join(interface_content))
logger.info('Created ifcfg file for interface %s.', interface) | [
"def",
"_WriteIfcfg",
"(",
"self",
",",
"interfaces",
",",
"logger",
")",
":",
"for",
"interface",
"in",
"interfaces",
":",
"interface_config",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"network_path",
",",
"'ifcfg-%s'",
"%",
"interface",
")",... | Write ifcfg files for multi-NIC support.
Overwrites the files. This allows us to update ifcfg-* in the future.
Disable the network setup to override this behavior and customize the
configurations.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port. | [
"Write",
"ifcfg",
"files",
"for",
"multi",
"-",
"NIC",
"support",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/sles_12/utils.py#L47-L71 | train | 229,383 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/distro_lib/sles_12/utils.py | Utils._Ifup | def _Ifup(self, interfaces, logger):
"""Activate network interfaces.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port.
"""
ifup = ['/usr/sbin/wicked', 'ifup', '--timeout', '1']
try:
subprocess.check_call(ifup + interfaces)
except subprocess.CalledProcessError:
logger.warning('Could not activate interfaces %s.', interfaces) | python | def _Ifup(self, interfaces, logger):
"""Activate network interfaces.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port.
"""
ifup = ['/usr/sbin/wicked', 'ifup', '--timeout', '1']
try:
subprocess.check_call(ifup + interfaces)
except subprocess.CalledProcessError:
logger.warning('Could not activate interfaces %s.', interfaces) | [
"def",
"_Ifup",
"(",
"self",
",",
"interfaces",
",",
"logger",
")",
":",
"ifup",
"=",
"[",
"'/usr/sbin/wicked'",
",",
"'ifup'",
",",
"'--timeout'",
",",
"'1'",
"]",
"try",
":",
"subprocess",
".",
"check_call",
"(",
"ifup",
"+",
"interfaces",
")",
"except... | Activate network interfaces.
Args:
interfaces: list of string, the output device names to enable.
logger: logger object, used to write to SysLog and serial port. | [
"Activate",
"network",
"interfaces",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/distro_lib/sles_12/utils.py#L73-L84 | train | 229,384 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/networking/network_daemon.py | NetworkDaemon.HandleNetworkInterfaces | def HandleNetworkInterfaces(self, result):
"""Called when network interface metadata changes.
Args:
result: dict, the metadata response with the network interfaces.
"""
network_interfaces = self._ExtractInterfaceMetadata(result)
if self.network_setup_enabled:
self.network_setup.EnableNetworkInterfaces(
[interface.name for interface in network_interfaces[1:]])
for interface in network_interfaces:
if self.ip_forwarding_enabled:
self.ip_forwarding.HandleForwardedIps(
interface.name, interface.forwarded_ips, interface.ip) | python | def HandleNetworkInterfaces(self, result):
"""Called when network interface metadata changes.
Args:
result: dict, the metadata response with the network interfaces.
"""
network_interfaces = self._ExtractInterfaceMetadata(result)
if self.network_setup_enabled:
self.network_setup.EnableNetworkInterfaces(
[interface.name for interface in network_interfaces[1:]])
for interface in network_interfaces:
if self.ip_forwarding_enabled:
self.ip_forwarding.HandleForwardedIps(
interface.name, interface.forwarded_ips, interface.ip) | [
"def",
"HandleNetworkInterfaces",
"(",
"self",
",",
"result",
")",
":",
"network_interfaces",
"=",
"self",
".",
"_ExtractInterfaceMetadata",
"(",
"result",
")",
"if",
"self",
".",
"network_setup_enabled",
":",
"self",
".",
"network_setup",
".",
"EnableNetworkInterfa... | Called when network interface metadata changes.
Args:
result: dict, the metadata response with the network interfaces. | [
"Called",
"when",
"network",
"interface",
"metadata",
"changes",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/networking/network_daemon.py#L84-L99 | train | 229,385 |
GoogleCloudPlatform/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/networking/network_daemon.py | NetworkDaemon._ExtractInterfaceMetadata | def _ExtractInterfaceMetadata(self, metadata):
"""Extracts network interface metadata.
Args:
metadata: dict, the metadata response with the new network interfaces.
Returns:
list, a list of NetworkInterface objects.
"""
interfaces = []
for network_interface in metadata:
mac_address = network_interface.get('mac')
interface = self.network_utils.GetNetworkInterface(mac_address)
ip_addresses = []
if interface:
ip_addresses.extend(network_interface.get('forwardedIps', []))
if self.ip_aliases:
ip_addresses.extend(network_interface.get('ipAliases', []))
if self.target_instance_ips:
ip_addresses.extend(network_interface.get('targetInstanceIps', []))
interfaces.append(NetworkDaemon.NetworkInterface(
interface, ip_addresses, network_interface.get('ip', [])))
else:
message = 'Network interface not found for MAC address: %s.'
self.logger.warning(message, mac_address)
return interfaces | python | def _ExtractInterfaceMetadata(self, metadata):
"""Extracts network interface metadata.
Args:
metadata: dict, the metadata response with the new network interfaces.
Returns:
list, a list of NetworkInterface objects.
"""
interfaces = []
for network_interface in metadata:
mac_address = network_interface.get('mac')
interface = self.network_utils.GetNetworkInterface(mac_address)
ip_addresses = []
if interface:
ip_addresses.extend(network_interface.get('forwardedIps', []))
if self.ip_aliases:
ip_addresses.extend(network_interface.get('ipAliases', []))
if self.target_instance_ips:
ip_addresses.extend(network_interface.get('targetInstanceIps', []))
interfaces.append(NetworkDaemon.NetworkInterface(
interface, ip_addresses, network_interface.get('ip', [])))
else:
message = 'Network interface not found for MAC address: %s.'
self.logger.warning(message, mac_address)
return interfaces | [
"def",
"_ExtractInterfaceMetadata",
"(",
"self",
",",
"metadata",
")",
":",
"interfaces",
"=",
"[",
"]",
"for",
"network_interface",
"in",
"metadata",
":",
"mac_address",
"=",
"network_interface",
".",
"get",
"(",
"'mac'",
")",
"interface",
"=",
"self",
".",
... | Extracts network interface metadata.
Args:
metadata: dict, the metadata response with the new network interfaces.
Returns:
list, a list of NetworkInterface objects. | [
"Extracts",
"network",
"interface",
"metadata",
"."
] | 53ea8cd069fb4d9a1984d1c167e54c133033f8da | https://github.com/GoogleCloudPlatform/compute-image-packages/blob/53ea8cd069fb4d9a1984d1c167e54c133033f8da/packages/python-google-compute-engine/google_compute_engine/networking/network_daemon.py#L101-L126 | train | 229,386 |
sendgrid/python-http-client | python_http_client/client.py | Client._build_url | def _build_url(self, query_params):
"""Build the final URL to be passed to urllib
:param query_params: A dictionary of all the query parameters
:type query_params: dictionary
:return: string
"""
url = ''
count = 0
while count < len(self._url_path):
url += '/{}'.format(self._url_path[count])
count += 1
# add slash
if self.append_slash:
url += '/'
if query_params:
url_values = urlencode(sorted(query_params.items()), True)
url = '{}?{}'.format(url, url_values)
if self._version:
url = self._build_versioned_url(url)
else:
url = '{}{}'.format(self.host, url)
return url | python | def _build_url(self, query_params):
"""Build the final URL to be passed to urllib
:param query_params: A dictionary of all the query parameters
:type query_params: dictionary
:return: string
"""
url = ''
count = 0
while count < len(self._url_path):
url += '/{}'.format(self._url_path[count])
count += 1
# add slash
if self.append_slash:
url += '/'
if query_params:
url_values = urlencode(sorted(query_params.items()), True)
url = '{}?{}'.format(url, url_values)
if self._version:
url = self._build_versioned_url(url)
else:
url = '{}{}'.format(self.host, url)
return url | [
"def",
"_build_url",
"(",
"self",
",",
"query_params",
")",
":",
"url",
"=",
"''",
"count",
"=",
"0",
"while",
"count",
"<",
"len",
"(",
"self",
".",
"_url_path",
")",
":",
"url",
"+=",
"'/{}'",
".",
"format",
"(",
"self",
".",
"_url_path",
"[",
"c... | Build the final URL to be passed to urllib
:param query_params: A dictionary of all the query parameters
:type query_params: dictionary
:return: string | [
"Build",
"the",
"final",
"URL",
"to",
"be",
"passed",
"to",
"urllib"
] | fa72b743fbf1aa499cc4e34d6a690af3e16a7a4d | https://github.com/sendgrid/python-http-client/blob/fa72b743fbf1aa499cc4e34d6a690af3e16a7a4d/python_http_client/client.py#L107-L132 | train | 229,387 |
sendgrid/python-http-client | python_http_client/client.py | Client._build_client | def _build_client(self, name=None):
"""Make a new Client object
:param name: Name of the url segment
:type name: string
:return: A Client object
"""
url_path = self._url_path + [name] if name else self._url_path
return Client(host=self.host,
version=self._version,
request_headers=self.request_headers,
url_path=url_path,
append_slash=self.append_slash,
timeout=self.timeout) | python | def _build_client(self, name=None):
"""Make a new Client object
:param name: Name of the url segment
:type name: string
:return: A Client object
"""
url_path = self._url_path + [name] if name else self._url_path
return Client(host=self.host,
version=self._version,
request_headers=self.request_headers,
url_path=url_path,
append_slash=self.append_slash,
timeout=self.timeout) | [
"def",
"_build_client",
"(",
"self",
",",
"name",
"=",
"None",
")",
":",
"url_path",
"=",
"self",
".",
"_url_path",
"+",
"[",
"name",
"]",
"if",
"name",
"else",
"self",
".",
"_url_path",
"return",
"Client",
"(",
"host",
"=",
"self",
".",
"host",
",",... | Make a new Client object
:param name: Name of the url segment
:type name: string
:return: A Client object | [
"Make",
"a",
"new",
"Client",
"object"
] | fa72b743fbf1aa499cc4e34d6a690af3e16a7a4d | https://github.com/sendgrid/python-http-client/blob/fa72b743fbf1aa499cc4e34d6a690af3e16a7a4d/python_http_client/client.py#L143-L156 | train | 229,388 |
sendgrid/python-http-client | python_http_client/client.py | Client._make_request | def _make_request(self, opener, request, timeout=None):
"""Make the API call and return the response. This is separated into
it's own function, so we can mock it easily for testing.
:param opener:
:type opener:
:param request: url payload to request
:type request: urllib.Request object
:param timeout: timeout value or None
:type timeout: float
:return: urllib response
"""
timeout = timeout or self.timeout
try:
return opener.open(request, timeout=timeout)
except HTTPError as err:
exc = handle_error(err)
exc.__cause__ = None
raise exc | python | def _make_request(self, opener, request, timeout=None):
"""Make the API call and return the response. This is separated into
it's own function, so we can mock it easily for testing.
:param opener:
:type opener:
:param request: url payload to request
:type request: urllib.Request object
:param timeout: timeout value or None
:type timeout: float
:return: urllib response
"""
timeout = timeout or self.timeout
try:
return opener.open(request, timeout=timeout)
except HTTPError as err:
exc = handle_error(err)
exc.__cause__ = None
raise exc | [
"def",
"_make_request",
"(",
"self",
",",
"opener",
",",
"request",
",",
"timeout",
"=",
"None",
")",
":",
"timeout",
"=",
"timeout",
"or",
"self",
".",
"timeout",
"try",
":",
"return",
"opener",
".",
"open",
"(",
"request",
",",
"timeout",
"=",
"timeo... | Make the API call and return the response. This is separated into
it's own function, so we can mock it easily for testing.
:param opener:
:type opener:
:param request: url payload to request
:type request: urllib.Request object
:param timeout: timeout value or None
:type timeout: float
:return: urllib response | [
"Make",
"the",
"API",
"call",
"and",
"return",
"the",
"response",
".",
"This",
"is",
"separated",
"into",
"it",
"s",
"own",
"function",
"so",
"we",
"can",
"mock",
"it",
"easily",
"for",
"testing",
"."
] | fa72b743fbf1aa499cc4e34d6a690af3e16a7a4d | https://github.com/sendgrid/python-http-client/blob/fa72b743fbf1aa499cc4e34d6a690af3e16a7a4d/python_http_client/client.py#L158-L176 | train | 229,389 |
junzis/pyModeS | pyModeS/decoder/bds/bds08.py | category | def category(msg):
"""Aircraft category number
Args:
msg (string): 28 bytes hexadecimal message string
Returns:
int: category number
"""
if common.typecode(msg) < 1 or common.typecode(msg) > 4:
raise RuntimeError("%s: Not a identification message" % msg)
msgbin = common.hex2bin(msg)
return common.bin2int(msgbin[5:8]) | python | def category(msg):
"""Aircraft category number
Args:
msg (string): 28 bytes hexadecimal message string
Returns:
int: category number
"""
if common.typecode(msg) < 1 or common.typecode(msg) > 4:
raise RuntimeError("%s: Not a identification message" % msg)
msgbin = common.hex2bin(msg)
return common.bin2int(msgbin[5:8]) | [
"def",
"category",
"(",
"msg",
")",
":",
"if",
"common",
".",
"typecode",
"(",
"msg",
")",
"<",
"1",
"or",
"common",
".",
"typecode",
"(",
"msg",
")",
">",
"4",
":",
"raise",
"RuntimeError",
"(",
"\"%s: Not a identification message\"",
"%",
"msg",
")",
... | Aircraft category number
Args:
msg (string): 28 bytes hexadecimal message string
Returns:
int: category number | [
"Aircraft",
"category",
"number"
] | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/bds/bds08.py#L26-L40 | train | 229,390 |
junzis/pyModeS | pyModeS/decoder/bds/bds05.py | airborne_position | def airborne_position(msg0, msg1, t0, t1):
"""Decode airborn position from a pair of even and odd position message
Args:
msg0 (string): even message (28 bytes hexadecimal string)
msg1 (string): odd message (28 bytes hexadecimal string)
t0 (int): timestamps for the even message
t1 (int): timestamps for the odd message
Returns:
(float, float): (latitude, longitude) of the aircraft
"""
mb0 = common.hex2bin(msg0)[32:]
mb1 = common.hex2bin(msg1)[32:]
# 131072 is 2^17, since CPR lat and lon are 17 bits each.
cprlat_even = common.bin2int(mb0[22:39]) / 131072.0
cprlon_even = common.bin2int(mb0[39:56]) / 131072.0
cprlat_odd = common.bin2int(mb1[22:39]) / 131072.0
cprlon_odd = common.bin2int(mb1[39:56]) / 131072.0
air_d_lat_even = 360.0 / 60
air_d_lat_odd = 360.0 / 59
# compute latitude index 'j'
j = common.floor(59 * cprlat_even - 60 * cprlat_odd + 0.5)
lat_even = float(air_d_lat_even * (j % 60 + cprlat_even))
lat_odd = float(air_d_lat_odd * (j % 59 + cprlat_odd))
if lat_even >= 270:
lat_even = lat_even - 360
if lat_odd >= 270:
lat_odd = lat_odd - 360
# check if both are in the same latidude zone, exit if not
if common.cprNL(lat_even) != common.cprNL(lat_odd):
return None
# compute ni, longitude index m, and longitude
if (t0 > t1):
lat = lat_even
nl = common.cprNL(lat)
ni = max(common.cprNL(lat)- 0, 1)
m = common.floor(cprlon_even * (nl-1) - cprlon_odd * nl + 0.5)
lon = (360.0 / ni) * (m % ni + cprlon_even)
else:
lat = lat_odd
nl = common.cprNL(lat)
ni = max(common.cprNL(lat) - 1, 1)
m = common.floor(cprlon_even * (nl-1) - cprlon_odd * nl + 0.5)
lon = (360.0 / ni) * (m % ni + cprlon_odd)
if lon > 180:
lon = lon - 360
return round(lat, 5), round(lon, 5) | python | def airborne_position(msg0, msg1, t0, t1):
"""Decode airborn position from a pair of even and odd position message
Args:
msg0 (string): even message (28 bytes hexadecimal string)
msg1 (string): odd message (28 bytes hexadecimal string)
t0 (int): timestamps for the even message
t1 (int): timestamps for the odd message
Returns:
(float, float): (latitude, longitude) of the aircraft
"""
mb0 = common.hex2bin(msg0)[32:]
mb1 = common.hex2bin(msg1)[32:]
# 131072 is 2^17, since CPR lat and lon are 17 bits each.
cprlat_even = common.bin2int(mb0[22:39]) / 131072.0
cprlon_even = common.bin2int(mb0[39:56]) / 131072.0
cprlat_odd = common.bin2int(mb1[22:39]) / 131072.0
cprlon_odd = common.bin2int(mb1[39:56]) / 131072.0
air_d_lat_even = 360.0 / 60
air_d_lat_odd = 360.0 / 59
# compute latitude index 'j'
j = common.floor(59 * cprlat_even - 60 * cprlat_odd + 0.5)
lat_even = float(air_d_lat_even * (j % 60 + cprlat_even))
lat_odd = float(air_d_lat_odd * (j % 59 + cprlat_odd))
if lat_even >= 270:
lat_even = lat_even - 360
if lat_odd >= 270:
lat_odd = lat_odd - 360
# check if both are in the same latidude zone, exit if not
if common.cprNL(lat_even) != common.cprNL(lat_odd):
return None
# compute ni, longitude index m, and longitude
if (t0 > t1):
lat = lat_even
nl = common.cprNL(lat)
ni = max(common.cprNL(lat)- 0, 1)
m = common.floor(cprlon_even * (nl-1) - cprlon_odd * nl + 0.5)
lon = (360.0 / ni) * (m % ni + cprlon_even)
else:
lat = lat_odd
nl = common.cprNL(lat)
ni = max(common.cprNL(lat) - 1, 1)
m = common.floor(cprlon_even * (nl-1) - cprlon_odd * nl + 0.5)
lon = (360.0 / ni) * (m % ni + cprlon_odd)
if lon > 180:
lon = lon - 360
return round(lat, 5), round(lon, 5) | [
"def",
"airborne_position",
"(",
"msg0",
",",
"msg1",
",",
"t0",
",",
"t1",
")",
":",
"mb0",
"=",
"common",
".",
"hex2bin",
"(",
"msg0",
")",
"[",
"32",
":",
"]",
"mb1",
"=",
"common",
".",
"hex2bin",
"(",
"msg1",
")",
"[",
"32",
":",
"]",
"# 1... | Decode airborn position from a pair of even and odd position message
Args:
msg0 (string): even message (28 bytes hexadecimal string)
msg1 (string): odd message (28 bytes hexadecimal string)
t0 (int): timestamps for the even message
t1 (int): timestamps for the odd message
Returns:
(float, float): (latitude, longitude) of the aircraft | [
"Decode",
"airborn",
"position",
"from",
"a",
"pair",
"of",
"even",
"and",
"odd",
"position",
"message"
] | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/bds/bds05.py#L27-L85 | train | 229,391 |
junzis/pyModeS | pyModeS/decoder/bds/bds05.py | airborne_position_with_ref | def airborne_position_with_ref(msg, lat_ref, lon_ref):
"""Decode airborne position with only one message,
knowing reference nearby location, such as previously calculated location,
ground station, or airport location, etc. The reference position shall
be with in 180NM of the true position.
Args:
msg (string): even message (28 bytes hexadecimal string)
lat_ref: previous known latitude
lon_ref: previous known longitude
Returns:
(float, float): (latitude, longitude) of the aircraft
"""
mb = common.hex2bin(msg)[32:]
cprlat = common.bin2int(mb[22:39]) / 131072.0
cprlon = common.bin2int(mb[39:56]) / 131072.0
i = int(mb[21])
d_lat = 360.0/59 if i else 360.0/60
j = common.floor(lat_ref / d_lat) \
+ common.floor(0.5 + ((lat_ref % d_lat) / d_lat) - cprlat)
lat = d_lat * (j + cprlat)
ni = common.cprNL(lat) - i
if ni > 0:
d_lon = 360.0 / ni
else:
d_lon = 360.0
m = common.floor(lon_ref / d_lon) \
+ common.floor(0.5 + ((lon_ref % d_lon) / d_lon) - cprlon)
lon = d_lon * (m + cprlon)
return round(lat, 5), round(lon, 5) | python | def airborne_position_with_ref(msg, lat_ref, lon_ref):
"""Decode airborne position with only one message,
knowing reference nearby location, such as previously calculated location,
ground station, or airport location, etc. The reference position shall
be with in 180NM of the true position.
Args:
msg (string): even message (28 bytes hexadecimal string)
lat_ref: previous known latitude
lon_ref: previous known longitude
Returns:
(float, float): (latitude, longitude) of the aircraft
"""
mb = common.hex2bin(msg)[32:]
cprlat = common.bin2int(mb[22:39]) / 131072.0
cprlon = common.bin2int(mb[39:56]) / 131072.0
i = int(mb[21])
d_lat = 360.0/59 if i else 360.0/60
j = common.floor(lat_ref / d_lat) \
+ common.floor(0.5 + ((lat_ref % d_lat) / d_lat) - cprlat)
lat = d_lat * (j + cprlat)
ni = common.cprNL(lat) - i
if ni > 0:
d_lon = 360.0 / ni
else:
d_lon = 360.0
m = common.floor(lon_ref / d_lon) \
+ common.floor(0.5 + ((lon_ref % d_lon) / d_lon) - cprlon)
lon = d_lon * (m + cprlon)
return round(lat, 5), round(lon, 5) | [
"def",
"airborne_position_with_ref",
"(",
"msg",
",",
"lat_ref",
",",
"lon_ref",
")",
":",
"mb",
"=",
"common",
".",
"hex2bin",
"(",
"msg",
")",
"[",
"32",
":",
"]",
"cprlat",
"=",
"common",
".",
"bin2int",
"(",
"mb",
"[",
"22",
":",
"39",
"]",
")"... | Decode airborne position with only one message,
knowing reference nearby location, such as previously calculated location,
ground station, or airport location, etc. The reference position shall
be with in 180NM of the true position.
Args:
msg (string): even message (28 bytes hexadecimal string)
lat_ref: previous known latitude
lon_ref: previous known longitude
Returns:
(float, float): (latitude, longitude) of the aircraft | [
"Decode",
"airborne",
"position",
"with",
"only",
"one",
"message",
"knowing",
"reference",
"nearby",
"location",
"such",
"as",
"previously",
"calculated",
"location",
"ground",
"station",
"or",
"airport",
"location",
"etc",
".",
"The",
"reference",
"position",
"s... | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/bds/bds05.py#L88-L129 | train | 229,392 |
junzis/pyModeS | pyModeS/decoder/common.py | hex2bin | def hex2bin(hexstr):
"""Convert a hexdecimal string to binary string, with zero fillings. """
num_of_bits = len(hexstr) * 4
binstr = bin(int(hexstr, 16))[2:].zfill(int(num_of_bits))
return binstr | python | def hex2bin(hexstr):
"""Convert a hexdecimal string to binary string, with zero fillings. """
num_of_bits = len(hexstr) * 4
binstr = bin(int(hexstr, 16))[2:].zfill(int(num_of_bits))
return binstr | [
"def",
"hex2bin",
"(",
"hexstr",
")",
":",
"num_of_bits",
"=",
"len",
"(",
"hexstr",
")",
"*",
"4",
"binstr",
"=",
"bin",
"(",
"int",
"(",
"hexstr",
",",
"16",
")",
")",
"[",
"2",
":",
"]",
".",
"zfill",
"(",
"int",
"(",
"num_of_bits",
")",
")"... | Convert a hexdecimal string to binary string, with zero fillings. | [
"Convert",
"a",
"hexdecimal",
"string",
"to",
"binary",
"string",
"with",
"zero",
"fillings",
"."
] | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/common.py#L4-L8 | train | 229,393 |
junzis/pyModeS | pyModeS/decoder/common.py | icao | def icao(msg):
"""Calculate the ICAO address from an Mode-S message
with DF4, DF5, DF20, DF21
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
String: ICAO address in 6 bytes hexadecimal string
"""
DF = df(msg)
if DF in (11, 17, 18):
addr = msg[2:8]
elif DF in (0, 4, 5, 16, 20, 21):
c0 = bin2int(crc(msg, encode=True))
c1 = hex2int(msg[-6:])
addr = '%06X' % (c0 ^ c1)
else:
addr = None
return addr | python | def icao(msg):
"""Calculate the ICAO address from an Mode-S message
with DF4, DF5, DF20, DF21
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
String: ICAO address in 6 bytes hexadecimal string
"""
DF = df(msg)
if DF in (11, 17, 18):
addr = msg[2:8]
elif DF in (0, 4, 5, 16, 20, 21):
c0 = bin2int(crc(msg, encode=True))
c1 = hex2int(msg[-6:])
addr = '%06X' % (c0 ^ c1)
else:
addr = None
return addr | [
"def",
"icao",
"(",
"msg",
")",
":",
"DF",
"=",
"df",
"(",
"msg",
")",
"if",
"DF",
"in",
"(",
"11",
",",
"17",
",",
"18",
")",
":",
"addr",
"=",
"msg",
"[",
"2",
":",
"8",
"]",
"elif",
"DF",
"in",
"(",
"0",
",",
"4",
",",
"5",
",",
"1... | Calculate the ICAO address from an Mode-S message
with DF4, DF5, DF20, DF21
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
String: ICAO address in 6 bytes hexadecimal string | [
"Calculate",
"the",
"ICAO",
"address",
"from",
"an",
"Mode",
"-",
"S",
"message",
"with",
"DF4",
"DF5",
"DF20",
"DF21"
] | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/common.py#L79-L101 | train | 229,394 |
junzis/pyModeS | pyModeS/decoder/common.py | gray2int | def gray2int(graystr):
"""Convert greycode to binary"""
num = bin2int(graystr)
num ^= (num >> 8)
num ^= (num >> 4)
num ^= (num >> 2)
num ^= (num >> 1)
return num | python | def gray2int(graystr):
"""Convert greycode to binary"""
num = bin2int(graystr)
num ^= (num >> 8)
num ^= (num >> 4)
num ^= (num >> 2)
num ^= (num >> 1)
return num | [
"def",
"gray2int",
"(",
"graystr",
")",
":",
"num",
"=",
"bin2int",
"(",
"graystr",
")",
"num",
"^=",
"(",
"num",
">>",
"8",
")",
"num",
"^=",
"(",
"num",
">>",
"4",
")",
"num",
"^=",
"(",
"num",
">>",
"2",
")",
"num",
"^=",
"(",
"num",
">>",... | Convert greycode to binary | [
"Convert",
"greycode",
"to",
"binary"
] | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/common.py#L268-L275 | train | 229,395 |
junzis/pyModeS | pyModeS/decoder/common.py | allzeros | def allzeros(msg):
"""check if the data bits are all zeros
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
bool: True or False
"""
d = hex2bin(data(msg))
if bin2int(d) > 0:
return False
else:
return True | python | def allzeros(msg):
"""check if the data bits are all zeros
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
bool: True or False
"""
d = hex2bin(data(msg))
if bin2int(d) > 0:
return False
else:
return True | [
"def",
"allzeros",
"(",
"msg",
")",
":",
"d",
"=",
"hex2bin",
"(",
"data",
"(",
"msg",
")",
")",
"if",
"bin2int",
"(",
"d",
")",
">",
"0",
":",
"return",
"False",
"else",
":",
"return",
"True"
] | check if the data bits are all zeros
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
bool: True or False | [
"check",
"if",
"the",
"data",
"bits",
"are",
"all",
"zeros"
] | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/common.py#L283-L297 | train | 229,396 |
junzis/pyModeS | pyModeS/decoder/common.py | wrongstatus | def wrongstatus(data, sb, msb, lsb):
"""Check if the status bit and field bits are consistency. This Function
is used for checking BDS code versions.
"""
# status bit, most significant bit, least significant bit
status = int(data[sb-1])
value = bin2int(data[msb-1:lsb])
if not status:
if value != 0:
return True
return False | python | def wrongstatus(data, sb, msb, lsb):
"""Check if the status bit and field bits are consistency. This Function
is used for checking BDS code versions.
"""
# status bit, most significant bit, least significant bit
status = int(data[sb-1])
value = bin2int(data[msb-1:lsb])
if not status:
if value != 0:
return True
return False | [
"def",
"wrongstatus",
"(",
"data",
",",
"sb",
",",
"msb",
",",
"lsb",
")",
":",
"# status bit, most significant bit, least significant bit",
"status",
"=",
"int",
"(",
"data",
"[",
"sb",
"-",
"1",
"]",
")",
"value",
"=",
"bin2int",
"(",
"data",
"[",
"msb",... | Check if the status bit and field bits are consistency. This Function
is used for checking BDS code versions. | [
"Check",
"if",
"the",
"status",
"bit",
"and",
"field",
"bits",
"are",
"consistency",
".",
"This",
"Function",
"is",
"used",
"for",
"checking",
"BDS",
"code",
"versions",
"."
] | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/common.py#L300-L313 | train | 229,397 |
junzis/pyModeS | pyModeS/decoder/adsb.py | version | def version(msg):
"""ADS-B Version
Args:
msg (string): 28 bytes hexadecimal message string, TC = 31
Returns:
int: version number
"""
tc = typecode(msg)
if tc != 31:
raise RuntimeError("%s: Not a status operation message, expecting TC = 31" % msg)
msgbin = common.hex2bin(msg)
version = common.bin2int(msgbin[72:75])
return version | python | def version(msg):
"""ADS-B Version
Args:
msg (string): 28 bytes hexadecimal message string, TC = 31
Returns:
int: version number
"""
tc = typecode(msg)
if tc != 31:
raise RuntimeError("%s: Not a status operation message, expecting TC = 31" % msg)
msgbin = common.hex2bin(msg)
version = common.bin2int(msgbin[72:75])
return version | [
"def",
"version",
"(",
"msg",
")",
":",
"tc",
"=",
"typecode",
"(",
"msg",
")",
"if",
"tc",
"!=",
"31",
":",
"raise",
"RuntimeError",
"(",
"\"%s: Not a status operation message, expecting TC = 31\"",
"%",
"msg",
")",
"msgbin",
"=",
"common",
".",
"hex2bin",
... | ADS-B Version
Args:
msg (string): 28 bytes hexadecimal message string, TC = 31
Returns:
int: version number | [
"ADS",
"-",
"B",
"Version"
] | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/adsb.py#L194-L211 | train | 229,398 |
junzis/pyModeS | pyModeS/decoder/adsb.py | nic_v1 | def nic_v1(msg, NICs):
"""Calculate NIC, navigation integrity category, for ADS-B version 1
Args:
msg (string): 28 bytes hexadecimal message string
NICs (int or string): NIC supplement
Returns:
int or string: Horizontal Radius of Containment
int or string: Vertical Protection Limit
"""
if typecode(msg) < 5 or typecode(msg) > 22:
raise RuntimeError(
"%s: Not a surface position message (5<TC<8), \
airborne position message (8<TC<19), \
or airborne position with GNSS height (20<TC<22)" % msg
)
tc = typecode(msg)
NIC = uncertainty.TC_NICv1_lookup[tc]
if isinstance(NIC, dict):
NIC = NIC[NICs]
try:
Rc = uncertainty.NICv1[NIC][NICs]['Rc']
VPL = uncertainty.NICv1[NIC][NICs]['VPL']
except KeyError:
Rc, VPL = uncertainty.NA, uncertainty.NA
return Rc, VPL | python | def nic_v1(msg, NICs):
"""Calculate NIC, navigation integrity category, for ADS-B version 1
Args:
msg (string): 28 bytes hexadecimal message string
NICs (int or string): NIC supplement
Returns:
int or string: Horizontal Radius of Containment
int or string: Vertical Protection Limit
"""
if typecode(msg) < 5 or typecode(msg) > 22:
raise RuntimeError(
"%s: Not a surface position message (5<TC<8), \
airborne position message (8<TC<19), \
or airborne position with GNSS height (20<TC<22)" % msg
)
tc = typecode(msg)
NIC = uncertainty.TC_NICv1_lookup[tc]
if isinstance(NIC, dict):
NIC = NIC[NICs]
try:
Rc = uncertainty.NICv1[NIC][NICs]['Rc']
VPL = uncertainty.NICv1[NIC][NICs]['VPL']
except KeyError:
Rc, VPL = uncertainty.NA, uncertainty.NA
return Rc, VPL | [
"def",
"nic_v1",
"(",
"msg",
",",
"NICs",
")",
":",
"if",
"typecode",
"(",
"msg",
")",
"<",
"5",
"or",
"typecode",
"(",
"msg",
")",
">",
"22",
":",
"raise",
"RuntimeError",
"(",
"\"%s: Not a surface position message (5<TC<8), \\\n airborne position mess... | Calculate NIC, navigation integrity category, for ADS-B version 1
Args:
msg (string): 28 bytes hexadecimal message string
NICs (int or string): NIC supplement
Returns:
int or string: Horizontal Radius of Containment
int or string: Vertical Protection Limit | [
"Calculate",
"NIC",
"navigation",
"integrity",
"category",
"for",
"ADS",
"-",
"B",
"version",
"1"
] | 8cd5655a04b08171a9ad5f1ffd232b7e0178ea53 | https://github.com/junzis/pyModeS/blob/8cd5655a04b08171a9ad5f1ffd232b7e0178ea53/pyModeS/decoder/adsb.py#L278-L308 | train | 229,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.