repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
PagerDuty/pagerduty-api-python-client | pypd/models/user.py | User.delete_contact_method | def delete_contact_method(self, id, **kwargs):
"""Delete a contact method for this user."""
endpoint = '{0}/{1}/contact_methods/{2}'.format(
self.endpoint,
self['id'],
id,
)
return self.request('DELETE', endpoint=endpoint, query_params=kwargs) | python | def delete_contact_method(self, id, **kwargs):
"""Delete a contact method for this user."""
endpoint = '{0}/{1}/contact_methods/{2}'.format(
self.endpoint,
self['id'],
id,
)
return self.request('DELETE', endpoint=endpoint, query_params=kwargs) | [
"def",
"delete_contact_method",
"(",
"self",
",",
"id",
",",
"*",
"*",
"kwargs",
")",
":",
"endpoint",
"=",
"'{0}/{1}/contact_methods/{2}'",
".",
"format",
"(",
"self",
".",
"endpoint",
",",
"self",
"[",
"'id'",
"]",
",",
"id",
",",
")",
"return",
"self"... | Delete a contact method for this user. | [
"Delete",
"a",
"contact",
"method",
"for",
"this",
"user",
"."
] | f420b34ca9b29689cc2ecc9adca6dc5d56ae7161 | https://github.com/PagerDuty/pagerduty-api-python-client/blob/f420b34ca9b29689cc2ecc9adca6dc5d56ae7161/pypd/models/user.py#L55-L62 | train | 211,900 |
PagerDuty/pagerduty-api-python-client | pypd/models/user.py | User.get_contact_method | def get_contact_method(self, id, **kwargs):
"""Get a contact method for this user."""
endpoint = '{0}/{1}/contact_methods/{2}'.format(
self.endpoint,
self['id'],
id,
)
result = self.request('GET', endpoint=endpoint, query_params=kwargs)
return result['contact_method'] | python | def get_contact_method(self, id, **kwargs):
"""Get a contact method for this user."""
endpoint = '{0}/{1}/contact_methods/{2}'.format(
self.endpoint,
self['id'],
id,
)
result = self.request('GET', endpoint=endpoint, query_params=kwargs)
return result['contact_method'] | [
"def",
"get_contact_method",
"(",
"self",
",",
"id",
",",
"*",
"*",
"kwargs",
")",
":",
"endpoint",
"=",
"'{0}/{1}/contact_methods/{2}'",
".",
"format",
"(",
"self",
".",
"endpoint",
",",
"self",
"[",
"'id'",
"]",
",",
"id",
",",
")",
"result",
"=",
"s... | Get a contact method for this user. | [
"Get",
"a",
"contact",
"method",
"for",
"this",
"user",
"."
] | f420b34ca9b29689cc2ecc9adca6dc5d56ae7161 | https://github.com/PagerDuty/pagerduty-api-python-client/blob/f420b34ca9b29689cc2ecc9adca6dc5d56ae7161/pypd/models/user.py#L64-L72 | train | 211,901 |
PagerDuty/pagerduty-api-python-client | pypd/models/user.py | User.notification_rules | def notification_rules(self, **kwargs):
"""Get all notification rules for this user."""
endpoint = '{0}/{1}/notification_rules'.format(
self.endpoint,
self['id'],
)
result = self.request('GET', endpoint=endpoint, query_params=kwargs)
return result['notification_rules'] | python | def notification_rules(self, **kwargs):
"""Get all notification rules for this user."""
endpoint = '{0}/{1}/notification_rules'.format(
self.endpoint,
self['id'],
)
result = self.request('GET', endpoint=endpoint, query_params=kwargs)
return result['notification_rules'] | [
"def",
"notification_rules",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"endpoint",
"=",
"'{0}/{1}/notification_rules'",
".",
"format",
"(",
"self",
".",
"endpoint",
",",
"self",
"[",
"'id'",
"]",
",",
")",
"result",
"=",
"self",
".",
"request",
"("... | Get all notification rules for this user. | [
"Get",
"all",
"notification",
"rules",
"for",
"this",
"user",
"."
] | f420b34ca9b29689cc2ecc9adca6dc5d56ae7161 | https://github.com/PagerDuty/pagerduty-api-python-client/blob/f420b34ca9b29689cc2ecc9adca6dc5d56ae7161/pypd/models/user.py#L74-L81 | train | 211,902 |
PagerDuty/pagerduty-api-python-client | pypd/models/user.py | User.create_notification_rule | def create_notification_rule(self, data, **kwargs):
"""Create a notification rule for this user."""
data = {'notification_rule': data, }
endpoint = '{0}/{1}/notification_rules'.format(
self.endpoint,
self['id'],
)
result = self.request('POST', endpoint=endpoint, data=data,
query_params=kwargs)
self._data['notification_rules'].append(result['notification_rule'])
return result | python | def create_notification_rule(self, data, **kwargs):
"""Create a notification rule for this user."""
data = {'notification_rule': data, }
endpoint = '{0}/{1}/notification_rules'.format(
self.endpoint,
self['id'],
)
result = self.request('POST', endpoint=endpoint, data=data,
query_params=kwargs)
self._data['notification_rules'].append(result['notification_rule'])
return result | [
"def",
"create_notification_rule",
"(",
"self",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"{",
"'notification_rule'",
":",
"data",
",",
"}",
"endpoint",
"=",
"'{0}/{1}/notification_rules'",
".",
"format",
"(",
"self",
".",
"endpoint",
",",... | Create a notification rule for this user. | [
"Create",
"a",
"notification",
"rule",
"for",
"this",
"user",
"."
] | f420b34ca9b29689cc2ecc9adca6dc5d56ae7161 | https://github.com/PagerDuty/pagerduty-api-python-client/blob/f420b34ca9b29689cc2ecc9adca6dc5d56ae7161/pypd/models/user.py#L93-L103 | train | 211,903 |
PagerDuty/pagerduty-api-python-client | pypd/models/user.py | User.delete_notification_rule | def delete_notification_rule(self, id, **kwargs):
"""Get a notification rule for this user."""
endpoint = '{0}/{1}/notification_rules/{2}'.format(
self.endpoint,
self['id'],
id,
)
return self.request('DELETE', endpoint=endpoint, query_params=kwargs) | python | def delete_notification_rule(self, id, **kwargs):
"""Get a notification rule for this user."""
endpoint = '{0}/{1}/notification_rules/{2}'.format(
self.endpoint,
self['id'],
id,
)
return self.request('DELETE', endpoint=endpoint, query_params=kwargs) | [
"def",
"delete_notification_rule",
"(",
"self",
",",
"id",
",",
"*",
"*",
"kwargs",
")",
":",
"endpoint",
"=",
"'{0}/{1}/notification_rules/{2}'",
".",
"format",
"(",
"self",
".",
"endpoint",
",",
"self",
"[",
"'id'",
"]",
",",
"id",
",",
")",
"return",
... | Get a notification rule for this user. | [
"Get",
"a",
"notification",
"rule",
"for",
"this",
"user",
"."
] | f420b34ca9b29689cc2ecc9adca6dc5d56ae7161 | https://github.com/PagerDuty/pagerduty-api-python-client/blob/f420b34ca9b29689cc2ecc9adca6dc5d56ae7161/pypd/models/user.py#L105-L112 | train | 211,904 |
PagerDuty/pagerduty-api-python-client | pypd/models/add_ons.py | AddOn.install | def install(cls, type_, name, src, *args, **kwargs):
"""Install an add-on to this account."""
data = kwargs.pop('data', None)
if data is None:
data = {
'addon': {
'type': type_,
'name': name,
'src': src,
}
}
cls.create(data=data, *args, **kwargs) | python | def install(cls, type_, name, src, *args, **kwargs):
"""Install an add-on to this account."""
data = kwargs.pop('data', None)
if data is None:
data = {
'addon': {
'type': type_,
'name': name,
'src': src,
}
}
cls.create(data=data, *args, **kwargs) | [
"def",
"install",
"(",
"cls",
",",
"type_",
",",
"name",
",",
"src",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"kwargs",
".",
"pop",
"(",
"'data'",
",",
"None",
")",
"if",
"data",
"is",
"None",
":",
"data",
"=",
"{",
"... | Install an add-on to this account. | [
"Install",
"an",
"add",
"-",
"on",
"to",
"this",
"account",
"."
] | f420b34ca9b29689cc2ecc9adca6dc5d56ae7161 | https://github.com/PagerDuty/pagerduty-api-python-client/blob/f420b34ca9b29689cc2ecc9adca6dc5d56ae7161/pypd/models/add_ons.py#L12-L23 | train | 211,905 |
PagerDuty/pagerduty-api-python-client | pypd/models/vendor.py | Vendor.create | def create(cls, data=None, *args, **kwargs):
"""Validate and then create a Vendor entity."""
cls.validate(data)
# otherwise endpoint should contain the service path too
getattr(Entity, 'create').__func__(cls, data=data, *args, **kwargs) | python | def create(cls, data=None, *args, **kwargs):
"""Validate and then create a Vendor entity."""
cls.validate(data)
# otherwise endpoint should contain the service path too
getattr(Entity, 'create').__func__(cls, data=data, *args, **kwargs) | [
"def",
"create",
"(",
"cls",
",",
"data",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"cls",
".",
"validate",
"(",
"data",
")",
"# otherwise endpoint should contain the service path too",
"getattr",
"(",
"Entity",
",",
"'create'",
")",
... | Validate and then create a Vendor entity. | [
"Validate",
"and",
"then",
"create",
"a",
"Vendor",
"entity",
"."
] | f420b34ca9b29689cc2ecc9adca6dc5d56ae7161 | https://github.com/PagerDuty/pagerduty-api-python-client/blob/f420b34ca9b29689cc2ecc9adca6dc5d56ae7161/pypd/models/vendor.py#L34-L38 | train | 211,906 |
Tinche/cattrs | src/cattr/converters.py | Converter.unstruct_strat | def unstruct_strat(self):
# type: () -> UnstructureStrategy
"""The default way of unstructuring ``attrs`` classes."""
return (
UnstructureStrategy.AS_DICT
if self._unstructure_attrs == self.unstructure_attrs_asdict
else UnstructureStrategy.AS_TUPLE
) | python | def unstruct_strat(self):
# type: () -> UnstructureStrategy
"""The default way of unstructuring ``attrs`` classes."""
return (
UnstructureStrategy.AS_DICT
if self._unstructure_attrs == self.unstructure_attrs_asdict
else UnstructureStrategy.AS_TUPLE
) | [
"def",
"unstruct_strat",
"(",
"self",
")",
":",
"# type: () -> UnstructureStrategy",
"return",
"(",
"UnstructureStrategy",
".",
"AS_DICT",
"if",
"self",
".",
"_unstructure_attrs",
"==",
"self",
".",
"unstructure_attrs_asdict",
"else",
"UnstructureStrategy",
".",
"AS_TUP... | The default way of unstructuring ``attrs`` classes. | [
"The",
"default",
"way",
"of",
"unstructuring",
"attrs",
"classes",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L142-L149 | train | 211,907 |
Tinche/cattrs | src/cattr/converters.py | Converter.register_structure_hook | def register_structure_hook(self, cl, func):
"""Register a primitive-to-class converter function for a type.
The converter function should take two arguments:
* a Python object to be converted,
* the type to convert to
and return the instance of the class. The type may seem redundant, but
is sometimes needed (for example, when dealing with generic classes).
"""
if is_union_type(cl):
self._union_registry[cl] = func
else:
self._structure_func.register_cls_list([(cl, func)]) | python | def register_structure_hook(self, cl, func):
"""Register a primitive-to-class converter function for a type.
The converter function should take two arguments:
* a Python object to be converted,
* the type to convert to
and return the instance of the class. The type may seem redundant, but
is sometimes needed (for example, when dealing with generic classes).
"""
if is_union_type(cl):
self._union_registry[cl] = func
else:
self._structure_func.register_cls_list([(cl, func)]) | [
"def",
"register_structure_hook",
"(",
"self",
",",
"cl",
",",
"func",
")",
":",
"if",
"is_union_type",
"(",
"cl",
")",
":",
"self",
".",
"_union_registry",
"[",
"cl",
"]",
"=",
"func",
"else",
":",
"self",
".",
"_structure_func",
".",
"register_cls_list",... | Register a primitive-to-class converter function for a type.
The converter function should take two arguments:
* a Python object to be converted,
* the type to convert to
and return the instance of the class. The type may seem redundant, but
is sometimes needed (for example, when dealing with generic classes). | [
"Register",
"a",
"primitive",
"-",
"to",
"-",
"class",
"converter",
"function",
"for",
"a",
"type",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L166-L179 | train | 211,908 |
Tinche/cattrs | src/cattr/converters.py | Converter.structure | def structure(self, obj, cl):
# type: (Any, Type[T]) -> T
"""Convert unstructured Python data structures to structured data."""
return self._structure_func.dispatch(cl)(obj, cl) | python | def structure(self, obj, cl):
# type: (Any, Type[T]) -> T
"""Convert unstructured Python data structures to structured data."""
return self._structure_func.dispatch(cl)(obj, cl) | [
"def",
"structure",
"(",
"self",
",",
"obj",
",",
"cl",
")",
":",
"# type: (Any, Type[T]) -> T",
"return",
"self",
".",
"_structure_func",
".",
"dispatch",
"(",
"cl",
")",
"(",
"obj",
",",
"cl",
")"
] | Convert unstructured Python data structures to structured data. | [
"Convert",
"unstructured",
"Python",
"data",
"structures",
"to",
"structured",
"data",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L188-L192 | train | 211,909 |
Tinche/cattrs | src/cattr/converters.py | Converter.unstructure_attrs_asdict | def unstructure_attrs_asdict(self, obj):
# type: (Any) -> Dict[str, Any]
"""Our version of `attrs.asdict`, so we can call back to us."""
attrs = obj.__class__.__attrs_attrs__
dispatch = self._unstructure_func.dispatch
rv = self._dict_factory()
for a in attrs:
name = a.name
v = getattr(obj, name)
rv[name] = dispatch(v.__class__)(v)
return rv | python | def unstructure_attrs_asdict(self, obj):
# type: (Any) -> Dict[str, Any]
"""Our version of `attrs.asdict`, so we can call back to us."""
attrs = obj.__class__.__attrs_attrs__
dispatch = self._unstructure_func.dispatch
rv = self._dict_factory()
for a in attrs:
name = a.name
v = getattr(obj, name)
rv[name] = dispatch(v.__class__)(v)
return rv | [
"def",
"unstructure_attrs_asdict",
"(",
"self",
",",
"obj",
")",
":",
"# type: (Any) -> Dict[str, Any]",
"attrs",
"=",
"obj",
".",
"__class__",
".",
"__attrs_attrs__",
"dispatch",
"=",
"self",
".",
"_unstructure_func",
".",
"dispatch",
"rv",
"=",
"self",
".",
"_... | Our version of `attrs.asdict`, so we can call back to us. | [
"Our",
"version",
"of",
"attrs",
".",
"asdict",
"so",
"we",
"can",
"call",
"back",
"to",
"us",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L195-L205 | train | 211,910 |
Tinche/cattrs | src/cattr/converters.py | Converter.unstructure_attrs_astuple | def unstructure_attrs_astuple(self, obj):
# type: (Any) -> Tuple
"""Our version of `attrs.astuple`, so we can call back to us."""
attrs = obj.__class__.__attrs_attrs__
return tuple(self.unstructure(getattr(obj, a.name)) for a in attrs) | python | def unstructure_attrs_astuple(self, obj):
# type: (Any) -> Tuple
"""Our version of `attrs.astuple`, so we can call back to us."""
attrs = obj.__class__.__attrs_attrs__
return tuple(self.unstructure(getattr(obj, a.name)) for a in attrs) | [
"def",
"unstructure_attrs_astuple",
"(",
"self",
",",
"obj",
")",
":",
"# type: (Any) -> Tuple",
"attrs",
"=",
"obj",
".",
"__class__",
".",
"__attrs_attrs__",
"return",
"tuple",
"(",
"self",
".",
"unstructure",
"(",
"getattr",
"(",
"obj",
",",
"a",
".",
"na... | Our version of `attrs.astuple`, so we can call back to us. | [
"Our",
"version",
"of",
"attrs",
".",
"astuple",
"so",
"we",
"can",
"call",
"back",
"to",
"us",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L207-L211 | train | 211,911 |
Tinche/cattrs | src/cattr/converters.py | Converter._unstructure_mapping | def _unstructure_mapping(self, mapping):
"""Convert a mapping of attr classes to primitive equivalents."""
# We can reuse the mapping class, so dicts stay dicts and OrderedDicts
# stay OrderedDicts.
dispatch = self._unstructure_func.dispatch
return mapping.__class__(
(dispatch(k.__class__)(k), dispatch(v.__class__)(v))
for k, v in mapping.items()
) | python | def _unstructure_mapping(self, mapping):
"""Convert a mapping of attr classes to primitive equivalents."""
# We can reuse the mapping class, so dicts stay dicts and OrderedDicts
# stay OrderedDicts.
dispatch = self._unstructure_func.dispatch
return mapping.__class__(
(dispatch(k.__class__)(k), dispatch(v.__class__)(v))
for k, v in mapping.items()
) | [
"def",
"_unstructure_mapping",
"(",
"self",
",",
"mapping",
")",
":",
"# We can reuse the mapping class, so dicts stay dicts and OrderedDicts",
"# stay OrderedDicts.",
"dispatch",
"=",
"self",
".",
"_unstructure_func",
".",
"dispatch",
"return",
"mapping",
".",
"__class__",
... | Convert a mapping of attr classes to primitive equivalents. | [
"Convert",
"a",
"mapping",
"of",
"attr",
"classes",
"to",
"primitive",
"equivalents",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L227-L236 | train | 211,912 |
Tinche/cattrs | src/cattr/converters.py | Converter._structure_default | def _structure_default(self, obj, cl):
"""This is the fallthrough case. Everything is a subclass of `Any`.
A special condition here handles ``attrs`` classes.
Bare optionals end here too (optionals with arguments are unions.) We
treat bare optionals as Any.
"""
if cl is Any or cl is Optional:
return obj
# We don't know what this is, so we complain loudly.
msg = (
"Unsupported type: {0}. Register a structure hook for "
"it.".format(cl)
)
raise ValueError(msg) | python | def _structure_default(self, obj, cl):
"""This is the fallthrough case. Everything is a subclass of `Any`.
A special condition here handles ``attrs`` classes.
Bare optionals end here too (optionals with arguments are unions.) We
treat bare optionals as Any.
"""
if cl is Any or cl is Optional:
return obj
# We don't know what this is, so we complain loudly.
msg = (
"Unsupported type: {0}. Register a structure hook for "
"it.".format(cl)
)
raise ValueError(msg) | [
"def",
"_structure_default",
"(",
"self",
",",
"obj",
",",
"cl",
")",
":",
"if",
"cl",
"is",
"Any",
"or",
"cl",
"is",
"Optional",
":",
"return",
"obj",
"# We don't know what this is, so we complain loudly.",
"msg",
"=",
"(",
"\"Unsupported type: {0}. Register a stru... | This is the fallthrough case. Everything is a subclass of `Any`.
A special condition here handles ``attrs`` classes.
Bare optionals end here too (optionals with arguments are unions.) We
treat bare optionals as Any. | [
"This",
"is",
"the",
"fallthrough",
"case",
".",
"Everything",
"is",
"a",
"subclass",
"of",
"Any",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L240-L255 | train | 211,913 |
Tinche/cattrs | src/cattr/converters.py | Converter._structure_unicode | def _structure_unicode(self, obj, cl):
"""Just call ``cl`` with the given ``obj``"""
if not isinstance(obj, (bytes, unicode)):
return cl(str(obj))
else:
return obj | python | def _structure_unicode(self, obj, cl):
"""Just call ``cl`` with the given ``obj``"""
if not isinstance(obj, (bytes, unicode)):
return cl(str(obj))
else:
return obj | [
"def",
"_structure_unicode",
"(",
"self",
",",
"obj",
",",
"cl",
")",
":",
"if",
"not",
"isinstance",
"(",
"obj",
",",
"(",
"bytes",
",",
"unicode",
")",
")",
":",
"return",
"cl",
"(",
"str",
"(",
"obj",
")",
")",
"else",
":",
"return",
"obj"
] | Just call ``cl`` with the given ``obj`` | [
"Just",
"call",
"cl",
"with",
"the",
"given",
"obj"
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L266-L271 | train | 211,914 |
Tinche/cattrs | src/cattr/converters.py | Converter._structure_attr_from_tuple | def _structure_attr_from_tuple(self, a, name, value):
"""Handle an individual attrs attribute."""
type_ = a.type
if type_ is None:
# No type metadata.
return value
return self._structure_func.dispatch(type_)(value, type_) | python | def _structure_attr_from_tuple(self, a, name, value):
"""Handle an individual attrs attribute."""
type_ = a.type
if type_ is None:
# No type metadata.
return value
return self._structure_func.dispatch(type_)(value, type_) | [
"def",
"_structure_attr_from_tuple",
"(",
"self",
",",
"a",
",",
"name",
",",
"value",
")",
":",
"type_",
"=",
"a",
".",
"type",
"if",
"type_",
"is",
"None",
":",
"# No type metadata.",
"return",
"value",
"return",
"self",
".",
"_structure_func",
".",
"dis... | Handle an individual attrs attribute. | [
"Handle",
"an",
"individual",
"attrs",
"attribute",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L286-L292 | train | 211,915 |
Tinche/cattrs | src/cattr/converters.py | Converter._structure_list | def _structure_list(self, obj, cl):
"""Convert an iterable to a potentially generic list."""
if is_bare(cl) or cl.__args__[0] is Any:
return [e for e in obj]
else:
elem_type = cl.__args__[0]
return [
self._structure_func.dispatch(elem_type)(e, elem_type)
for e in obj
] | python | def _structure_list(self, obj, cl):
"""Convert an iterable to a potentially generic list."""
if is_bare(cl) or cl.__args__[0] is Any:
return [e for e in obj]
else:
elem_type = cl.__args__[0]
return [
self._structure_func.dispatch(elem_type)(e, elem_type)
for e in obj
] | [
"def",
"_structure_list",
"(",
"self",
",",
"obj",
",",
"cl",
")",
":",
"if",
"is_bare",
"(",
"cl",
")",
"or",
"cl",
".",
"__args__",
"[",
"0",
"]",
"is",
"Any",
":",
"return",
"[",
"e",
"for",
"e",
"in",
"obj",
"]",
"else",
":",
"elem_type",
"... | Convert an iterable to a potentially generic list. | [
"Convert",
"an",
"iterable",
"to",
"a",
"potentially",
"generic",
"list",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L319-L328 | train | 211,916 |
Tinche/cattrs | src/cattr/converters.py | Converter._structure_set | def _structure_set(self, obj, cl):
"""Convert an iterable into a potentially generic set."""
if is_bare(cl) or cl.__args__[0] is Any:
return set(obj)
else:
elem_type = cl.__args__[0]
return {
self._structure_func.dispatch(elem_type)(e, elem_type)
for e in obj
} | python | def _structure_set(self, obj, cl):
"""Convert an iterable into a potentially generic set."""
if is_bare(cl) or cl.__args__[0] is Any:
return set(obj)
else:
elem_type = cl.__args__[0]
return {
self._structure_func.dispatch(elem_type)(e, elem_type)
for e in obj
} | [
"def",
"_structure_set",
"(",
"self",
",",
"obj",
",",
"cl",
")",
":",
"if",
"is_bare",
"(",
"cl",
")",
"or",
"cl",
".",
"__args__",
"[",
"0",
"]",
"is",
"Any",
":",
"return",
"set",
"(",
"obj",
")",
"else",
":",
"elem_type",
"=",
"cl",
".",
"_... | Convert an iterable into a potentially generic set. | [
"Convert",
"an",
"iterable",
"into",
"a",
"potentially",
"generic",
"set",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L330-L339 | train | 211,917 |
Tinche/cattrs | src/cattr/converters.py | Converter._structure_frozenset | def _structure_frozenset(self, obj, cl):
"""Convert an iterable into a potentially generic frozenset."""
if is_bare(cl) or cl.__args__[0] is Any:
return frozenset(obj)
else:
elem_type = cl.__args__[0]
dispatch = self._structure_func.dispatch
return frozenset(dispatch(elem_type)(e, elem_type) for e in obj) | python | def _structure_frozenset(self, obj, cl):
"""Convert an iterable into a potentially generic frozenset."""
if is_bare(cl) or cl.__args__[0] is Any:
return frozenset(obj)
else:
elem_type = cl.__args__[0]
dispatch = self._structure_func.dispatch
return frozenset(dispatch(elem_type)(e, elem_type) for e in obj) | [
"def",
"_structure_frozenset",
"(",
"self",
",",
"obj",
",",
"cl",
")",
":",
"if",
"is_bare",
"(",
"cl",
")",
"or",
"cl",
".",
"__args__",
"[",
"0",
"]",
"is",
"Any",
":",
"return",
"frozenset",
"(",
"obj",
")",
"else",
":",
"elem_type",
"=",
"cl",... | Convert an iterable into a potentially generic frozenset. | [
"Convert",
"an",
"iterable",
"into",
"a",
"potentially",
"generic",
"frozenset",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L341-L348 | train | 211,918 |
Tinche/cattrs | src/cattr/converters.py | Converter._structure_dict | def _structure_dict(self, obj, cl):
"""Convert a mapping into a potentially generic dict."""
if is_bare(cl) or cl.__args__ == (Any, Any):
return dict(obj)
else:
key_type, val_type = cl.__args__
if key_type is Any:
val_conv = self._structure_func.dispatch(val_type)
return {k: val_conv(v, val_type) for k, v in obj.items()}
elif val_type is Any:
key_conv = self._structure_func.dispatch(key_type)
return {key_conv(k, key_type): v for k, v in obj.items()}
else:
key_conv = self._structure_func.dispatch(key_type)
val_conv = self._structure_func.dispatch(val_type)
return {
key_conv(k, key_type): val_conv(v, val_type)
for k, v in obj.items()
} | python | def _structure_dict(self, obj, cl):
"""Convert a mapping into a potentially generic dict."""
if is_bare(cl) or cl.__args__ == (Any, Any):
return dict(obj)
else:
key_type, val_type = cl.__args__
if key_type is Any:
val_conv = self._structure_func.dispatch(val_type)
return {k: val_conv(v, val_type) for k, v in obj.items()}
elif val_type is Any:
key_conv = self._structure_func.dispatch(key_type)
return {key_conv(k, key_type): v for k, v in obj.items()}
else:
key_conv = self._structure_func.dispatch(key_type)
val_conv = self._structure_func.dispatch(val_type)
return {
key_conv(k, key_type): val_conv(v, val_type)
for k, v in obj.items()
} | [
"def",
"_structure_dict",
"(",
"self",
",",
"obj",
",",
"cl",
")",
":",
"if",
"is_bare",
"(",
"cl",
")",
"or",
"cl",
".",
"__args__",
"==",
"(",
"Any",
",",
"Any",
")",
":",
"return",
"dict",
"(",
"obj",
")",
"else",
":",
"key_type",
",",
"val_ty... | Convert a mapping into a potentially generic dict. | [
"Convert",
"a",
"mapping",
"into",
"a",
"potentially",
"generic",
"dict",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L350-L368 | train | 211,919 |
Tinche/cattrs | src/cattr/converters.py | Converter._structure_union | def _structure_union(self, obj, union):
"""Deal with converting a union."""
# Unions with NoneType in them are basically optionals.
# We check for NoneType early and handle the case of obj being None,
# so disambiguation functions don't need to handle NoneType.
union_params = union.__args__
if NoneType in union_params: # type: ignore
if obj is None:
return None
if len(union_params) == 2:
# This is just a NoneType and something else.
other = (
union_params[0]
if union_params[1] is NoneType # type: ignore
else union_params[1]
)
# We can't actually have a Union of a Union, so this is safe.
return self._structure_func.dispatch(other)(obj, other)
# Check the union registry first.
handler = self._union_registry.get(union)
if handler is not None:
return handler(obj, union)
# Getting here means either this is not an optional, or it's an
# optional with more than one parameter.
# Let's support only unions of attr classes for now.
cl = self._dis_func_cache(union)(obj)
return self._structure_func.dispatch(cl)(obj, cl) | python | def _structure_union(self, obj, union):
"""Deal with converting a union."""
# Unions with NoneType in them are basically optionals.
# We check for NoneType early and handle the case of obj being None,
# so disambiguation functions don't need to handle NoneType.
union_params = union.__args__
if NoneType in union_params: # type: ignore
if obj is None:
return None
if len(union_params) == 2:
# This is just a NoneType and something else.
other = (
union_params[0]
if union_params[1] is NoneType # type: ignore
else union_params[1]
)
# We can't actually have a Union of a Union, so this is safe.
return self._structure_func.dispatch(other)(obj, other)
# Check the union registry first.
handler = self._union_registry.get(union)
if handler is not None:
return handler(obj, union)
# Getting here means either this is not an optional, or it's an
# optional with more than one parameter.
# Let's support only unions of attr classes for now.
cl = self._dis_func_cache(union)(obj)
return self._structure_func.dispatch(cl)(obj, cl) | [
"def",
"_structure_union",
"(",
"self",
",",
"obj",
",",
"union",
")",
":",
"# Unions with NoneType in them are basically optionals.",
"# We check for NoneType early and handle the case of obj being None,",
"# so disambiguation functions don't need to handle NoneType.",
"union_params",
"... | Deal with converting a union. | [
"Deal",
"with",
"converting",
"a",
"union",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L370-L398 | train | 211,920 |
Tinche/cattrs | src/cattr/converters.py | Converter._structure_tuple | def _structure_tuple(self, obj, tup):
"""Deal with converting to a tuple."""
tup_params = tup.__args__
has_ellipsis = tup_params and tup_params[-1] is Ellipsis
if tup_params is None or (has_ellipsis and tup_params[0] is Any):
# Just a Tuple. (No generic information.)
return tuple(obj)
if has_ellipsis:
# We're dealing with a homogenous tuple, Tuple[int, ...]
tup_type = tup_params[0]
conv = self._structure_func.dispatch(tup_type)
return tuple(conv(e, tup_type) for e in obj)
else:
# We're dealing with a heterogenous tuple.
return tuple(
self._structure_func.dispatch(t)(e, t)
for t, e in zip(tup_params, obj)
) | python | def _structure_tuple(self, obj, tup):
"""Deal with converting to a tuple."""
tup_params = tup.__args__
has_ellipsis = tup_params and tup_params[-1] is Ellipsis
if tup_params is None or (has_ellipsis and tup_params[0] is Any):
# Just a Tuple. (No generic information.)
return tuple(obj)
if has_ellipsis:
# We're dealing with a homogenous tuple, Tuple[int, ...]
tup_type = tup_params[0]
conv = self._structure_func.dispatch(tup_type)
return tuple(conv(e, tup_type) for e in obj)
else:
# We're dealing with a heterogenous tuple.
return tuple(
self._structure_func.dispatch(t)(e, t)
for t, e in zip(tup_params, obj)
) | [
"def",
"_structure_tuple",
"(",
"self",
",",
"obj",
",",
"tup",
")",
":",
"tup_params",
"=",
"tup",
".",
"__args__",
"has_ellipsis",
"=",
"tup_params",
"and",
"tup_params",
"[",
"-",
"1",
"]",
"is",
"Ellipsis",
"if",
"tup_params",
"is",
"None",
"or",
"("... | Deal with converting to a tuple. | [
"Deal",
"with",
"converting",
"to",
"a",
"tuple",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L400-L417 | train | 211,921 |
Tinche/cattrs | src/cattr/converters.py | Converter._get_dis_func | def _get_dis_func(self, union):
# type: (Type) -> Callable[..., Type]
"""Fetch or try creating a disambiguation function for a union."""
union_types = union.__args__
if NoneType in union_types: # type: ignore
# We support unions of attrs classes and NoneType higher in the
# logic.
union_types = tuple(
e for e in union_types if e is not NoneType # type: ignore
)
if not all(hasattr(e, "__attrs_attrs__") for e in union_types):
raise ValueError(
"Only unions of attr classes supported "
"currently. Register a loads hook manually."
)
return create_uniq_field_dis_func(*union_types) | python | def _get_dis_func(self, union):
# type: (Type) -> Callable[..., Type]
"""Fetch or try creating a disambiguation function for a union."""
union_types = union.__args__
if NoneType in union_types: # type: ignore
# We support unions of attrs classes and NoneType higher in the
# logic.
union_types = tuple(
e for e in union_types if e is not NoneType # type: ignore
)
if not all(hasattr(e, "__attrs_attrs__") for e in union_types):
raise ValueError(
"Only unions of attr classes supported "
"currently. Register a loads hook manually."
)
return create_uniq_field_dis_func(*union_types) | [
"def",
"_get_dis_func",
"(",
"self",
",",
"union",
")",
":",
"# type: (Type) -> Callable[..., Type]",
"union_types",
"=",
"union",
".",
"__args__",
"if",
"NoneType",
"in",
"union_types",
":",
"# type: ignore",
"# We support unions of attrs classes and NoneType higher in the",... | Fetch or try creating a disambiguation function for a union. | [
"Fetch",
"or",
"try",
"creating",
"a",
"disambiguation",
"function",
"for",
"a",
"union",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/converters.py#L419-L435 | train | 211,922 |
Tinche/cattrs | src/cattr/disambiguators.py | create_uniq_field_dis_func | def create_uniq_field_dis_func(*classes):
# type: (*Type) -> Callable
"""Given attr classes, generate a disambiguation function.
The function is based on unique fields."""
if len(classes) < 2:
raise ValueError("At least two classes required.")
cls_and_attrs = [(cl, set(at.name for at in fields(cl))) for cl in classes]
if len([attrs for _, attrs in cls_and_attrs if len(attrs) == 0]) > 1:
raise ValueError("At least two classes have no attributes.")
# TODO: Deal with a single class having no required attrs.
# For each class, attempt to generate a single unique required field.
uniq_attrs_dict = OrderedDict() # type: Dict[str, Type]
cls_and_attrs.sort(key=lambda c_a: -len(c_a[1]))
fallback = None # If none match, try this.
for i, (cl, cl_reqs) in enumerate(cls_and_attrs):
other_classes = cls_and_attrs[i + 1 :]
if other_classes:
other_reqs = reduce(or_, (c_a[1] for c_a in other_classes))
uniq = cl_reqs - other_reqs
if not uniq:
m = "{} has no usable unique attributes.".format(cl)
raise ValueError(m)
uniq_attrs_dict[next(iter(uniq))] = cl
else:
fallback = cl
def dis_func(data):
# type: (Mapping) -> Optional[Type]
if not isinstance(data, Mapping):
raise ValueError("Only input mappings are supported.")
for k, v in uniq_attrs_dict.items():
if k in data:
return v
return fallback
return dis_func | python | def create_uniq_field_dis_func(*classes):
# type: (*Type) -> Callable
"""Given attr classes, generate a disambiguation function.
The function is based on unique fields."""
if len(classes) < 2:
raise ValueError("At least two classes required.")
cls_and_attrs = [(cl, set(at.name for at in fields(cl))) for cl in classes]
if len([attrs for _, attrs in cls_and_attrs if len(attrs) == 0]) > 1:
raise ValueError("At least two classes have no attributes.")
# TODO: Deal with a single class having no required attrs.
# For each class, attempt to generate a single unique required field.
uniq_attrs_dict = OrderedDict() # type: Dict[str, Type]
cls_and_attrs.sort(key=lambda c_a: -len(c_a[1]))
fallback = None # If none match, try this.
for i, (cl, cl_reqs) in enumerate(cls_and_attrs):
other_classes = cls_and_attrs[i + 1 :]
if other_classes:
other_reqs = reduce(or_, (c_a[1] for c_a in other_classes))
uniq = cl_reqs - other_reqs
if not uniq:
m = "{} has no usable unique attributes.".format(cl)
raise ValueError(m)
uniq_attrs_dict[next(iter(uniq))] = cl
else:
fallback = cl
def dis_func(data):
# type: (Mapping) -> Optional[Type]
if not isinstance(data, Mapping):
raise ValueError("Only input mappings are supported.")
for k, v in uniq_attrs_dict.items():
if k in data:
return v
return fallback
return dis_func | [
"def",
"create_uniq_field_dis_func",
"(",
"*",
"classes",
")",
":",
"# type: (*Type) -> Callable",
"if",
"len",
"(",
"classes",
")",
"<",
"2",
":",
"raise",
"ValueError",
"(",
"\"At least two classes required.\"",
")",
"cls_and_attrs",
"=",
"[",
"(",
"cl",
",",
... | Given attr classes, generate a disambiguation function.
The function is based on unique fields. | [
"Given",
"attr",
"classes",
"generate",
"a",
"disambiguation",
"function",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/disambiguators.py#L17-L55 | train | 211,923 |
Tinche/cattrs | src/cattr/function_dispatch.py | FunctionDispatch._dispatch | def _dispatch(self, typ):
"""
returns the appropriate handler, for the object passed.
"""
for can_handle, handler in self._handler_pairs:
# can handle could raise an exception here
# such as issubclass being called on an instance.
# it's easier to just ignore that case.
try:
if can_handle(typ):
return handler
except Exception:
pass
raise KeyError("unable to find handler for {0}".format(typ)) | python | def _dispatch(self, typ):
"""
returns the appropriate handler, for the object passed.
"""
for can_handle, handler in self._handler_pairs:
# can handle could raise an exception here
# such as issubclass being called on an instance.
# it's easier to just ignore that case.
try:
if can_handle(typ):
return handler
except Exception:
pass
raise KeyError("unable to find handler for {0}".format(typ)) | [
"def",
"_dispatch",
"(",
"self",
",",
"typ",
")",
":",
"for",
"can_handle",
",",
"handler",
"in",
"self",
".",
"_handler_pairs",
":",
"# can handle could raise an exception here",
"# such as issubclass being called on an instance.",
"# it's easier to just ignore that case.",
... | returns the appropriate handler, for the object passed. | [
"returns",
"the",
"appropriate",
"handler",
"for",
"the",
"object",
"passed",
"."
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/function_dispatch.py#L23-L36 | train | 211,924 |
Tinche/cattrs | src/cattr/multistrategy_dispatch.py | MultiStrategyDispatch.register_cls_list | def register_cls_list(self, cls_and_handler):
""" register a class to singledispatch """
for cls, handler in cls_and_handler:
self._single_dispatch.register(cls, handler)
self.dispatch.cache_clear() | python | def register_cls_list(self, cls_and_handler):
""" register a class to singledispatch """
for cls, handler in cls_and_handler:
self._single_dispatch.register(cls, handler)
self.dispatch.cache_clear() | [
"def",
"register_cls_list",
"(",
"self",
",",
"cls_and_handler",
")",
":",
"for",
"cls",
",",
"handler",
"in",
"cls_and_handler",
":",
"self",
".",
"_single_dispatch",
".",
"register",
"(",
"cls",
",",
"handler",
")",
"self",
".",
"dispatch",
".",
"cache_cle... | register a class to singledispatch | [
"register",
"a",
"class",
"to",
"singledispatch"
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/multistrategy_dispatch.py#L40-L44 | train | 211,925 |
Tinche/cattrs | src/cattr/multistrategy_dispatch.py | MultiStrategyDispatch.register_func_list | def register_func_list(self, func_and_handler):
""" register a function to determine if the handle
should be used for the type
"""
for func, handler in func_and_handler:
self._function_dispatch.register(func, handler)
self.dispatch.cache_clear() | python | def register_func_list(self, func_and_handler):
""" register a function to determine if the handle
should be used for the type
"""
for func, handler in func_and_handler:
self._function_dispatch.register(func, handler)
self.dispatch.cache_clear() | [
"def",
"register_func_list",
"(",
"self",
",",
"func_and_handler",
")",
":",
"for",
"func",
",",
"handler",
"in",
"func_and_handler",
":",
"self",
".",
"_function_dispatch",
".",
"register",
"(",
"func",
",",
"handler",
")",
"self",
".",
"dispatch",
".",
"ca... | register a function to determine if the handle
should be used for the type | [
"register",
"a",
"function",
"to",
"determine",
"if",
"the",
"handle",
"should",
"be",
"used",
"for",
"the",
"type"
] | 481bc9bdb69b2190d699b54f331c8c5c075506d5 | https://github.com/Tinche/cattrs/blob/481bc9bdb69b2190d699b54f331c8c5c075506d5/src/cattr/multistrategy_dispatch.py#L46-L52 | train | 211,926 |
jeffknupp/sandman | sandman/model/models.py | Model.resource_uri | def resource_uri(self):
"""Return the URI at which the resource can be found.
:rtype: string
"""
primary_key_value = getattr(self, self.primary_key(), None)
return '/{}/{}'.format(self.endpoint(), primary_key_value) | python | def resource_uri(self):
"""Return the URI at which the resource can be found.
:rtype: string
"""
primary_key_value = getattr(self, self.primary_key(), None)
return '/{}/{}'.format(self.endpoint(), primary_key_value) | [
"def",
"resource_uri",
"(",
"self",
")",
":",
"primary_key_value",
"=",
"getattr",
"(",
"self",
",",
"self",
".",
"primary_key",
"(",
")",
",",
"None",
")",
"return",
"'/{}/{}'",
".",
"format",
"(",
"self",
".",
"endpoint",
"(",
")",
",",
"primary_key_va... | Return the URI at which the resource can be found.
:rtype: string | [
"Return",
"the",
"URI",
"at",
"which",
"the",
"resource",
"can",
"be",
"found",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/model/models.py#L70-L77 | train | 211,927 |
jeffknupp/sandman | sandman/model/models.py | Model.primary_key | def primary_key(cls):
"""Return the name of the table's primary key
:rtype: string
"""
if cls.__from_class__:
cls = cls.__from_class__
return cls.__table__.primary_key.columns.values()[0].name | python | def primary_key(cls):
"""Return the name of the table's primary key
:rtype: string
"""
if cls.__from_class__:
cls = cls.__from_class__
return cls.__table__.primary_key.columns.values()[0].name | [
"def",
"primary_key",
"(",
"cls",
")",
":",
"if",
"cls",
".",
"__from_class__",
":",
"cls",
"=",
"cls",
".",
"__from_class__",
"return",
"cls",
".",
"__table__",
".",
"primary_key",
".",
"columns",
".",
"values",
"(",
")",
"[",
"0",
"]",
".",
"name"
] | Return the name of the table's primary key
:rtype: string | [
"Return",
"the",
"name",
"of",
"the",
"table",
"s",
"primary",
"key"
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/model/models.py#L80-L89 | train | 211,928 |
jeffknupp/sandman | sandman/model/models.py | Model.links | def links(self):
"""Return a list of links for endpoints related to the resource.
:rtype: list
"""
links = []
for foreign_key in self.__table__.foreign_keys:
column = foreign_key.column.name
column_value = getattr(self, column, None)
if column_value:
table = foreign_key.column.table.name
with app.app_context():
endpoint = current_app.class_references[table]
links.append({'rel': 'related', 'uri': '/{}/{}'.format(
endpoint.__name__, column_value)})
links.append({'rel': 'self', 'uri': self.resource_uri()})
return links | python | def links(self):
"""Return a list of links for endpoints related to the resource.
:rtype: list
"""
links = []
for foreign_key in self.__table__.foreign_keys:
column = foreign_key.column.name
column_value = getattr(self, column, None)
if column_value:
table = foreign_key.column.table.name
with app.app_context():
endpoint = current_app.class_references[table]
links.append({'rel': 'related', 'uri': '/{}/{}'.format(
endpoint.__name__, column_value)})
links.append({'rel': 'self', 'uri': self.resource_uri()})
return links | [
"def",
"links",
"(",
"self",
")",
":",
"links",
"=",
"[",
"]",
"for",
"foreign_key",
"in",
"self",
".",
"__table__",
".",
"foreign_keys",
":",
"column",
"=",
"foreign_key",
".",
"column",
".",
"name",
"column_value",
"=",
"getattr",
"(",
"self",
",",
"... | Return a list of links for endpoints related to the resource.
:rtype: list | [
"Return",
"a",
"list",
"of",
"links",
"for",
"endpoints",
"related",
"to",
"the",
"resource",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/model/models.py#L91-L109 | train | 211,929 |
jeffknupp/sandman | sandman/model/models.py | Model.as_dict | def as_dict(self, depth=0):
"""Return a dictionary containing only the attributes which map to
an instance's database columns.
:param int depth: Maximum depth to recurse subobjects
:rtype: dict
"""
result_dict = {}
for column in self.__table__.columns.keys():
result_dict[column] = getattr(self, column, None)
if isinstance(result_dict[column], Decimal):
result_dict[column] = str(result_dict[column])
result_dict['links'] = self.links()
for foreign_key in self.__table__.foreign_keys:
column_name = foreign_key.column.name
column_value = getattr(self, column_name, None)
if column_value:
table = foreign_key.column.table.name
with app.app_context():
endpoint = current_app.class_references[table]
session = db.session()
resource = session.query(endpoint).get(column_value)
if depth > 0:
result_dict.update({
'rel': endpoint.__name__,
endpoint.__name__.lower(): resource.as_dict(depth - 1)
})
else:
result_dict[
endpoint.__name__.lower() + '_url'] = '/{}/{}'.format(
endpoint.__name__, column_value)
result_dict['self'] = self.resource_uri()
return result_dict | python | def as_dict(self, depth=0):
"""Return a dictionary containing only the attributes which map to
an instance's database columns.
:param int depth: Maximum depth to recurse subobjects
:rtype: dict
"""
result_dict = {}
for column in self.__table__.columns.keys():
result_dict[column] = getattr(self, column, None)
if isinstance(result_dict[column], Decimal):
result_dict[column] = str(result_dict[column])
result_dict['links'] = self.links()
for foreign_key in self.__table__.foreign_keys:
column_name = foreign_key.column.name
column_value = getattr(self, column_name, None)
if column_value:
table = foreign_key.column.table.name
with app.app_context():
endpoint = current_app.class_references[table]
session = db.session()
resource = session.query(endpoint).get(column_value)
if depth > 0:
result_dict.update({
'rel': endpoint.__name__,
endpoint.__name__.lower(): resource.as_dict(depth - 1)
})
else:
result_dict[
endpoint.__name__.lower() + '_url'] = '/{}/{}'.format(
endpoint.__name__, column_value)
result_dict['self'] = self.resource_uri()
return result_dict | [
"def",
"as_dict",
"(",
"self",
",",
"depth",
"=",
"0",
")",
":",
"result_dict",
"=",
"{",
"}",
"for",
"column",
"in",
"self",
".",
"__table__",
".",
"columns",
".",
"keys",
"(",
")",
":",
"result_dict",
"[",
"column",
"]",
"=",
"getattr",
"(",
"sel... | Return a dictionary containing only the attributes which map to
an instance's database columns.
:param int depth: Maximum depth to recurse subobjects
:rtype: dict | [
"Return",
"a",
"dictionary",
"containing",
"only",
"the",
"attributes",
"which",
"map",
"to",
"an",
"instance",
"s",
"database",
"columns",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/model/models.py#L111-L145 | train | 211,930 |
jeffknupp/sandman | sandman/model/models.py | Model.meta | def meta(cls):
"""Return a dictionary containing meta-information about the given
resource."""
if getattr(cls, '__from_class__', None) is not None:
cls = cls.__from_class__
attribute_info = {}
for name, value in cls.__table__.columns.items():
attribute_info[name] = str(value.type).lower()
return {cls.__name__: attribute_info} | python | def meta(cls):
"""Return a dictionary containing meta-information about the given
resource."""
if getattr(cls, '__from_class__', None) is not None:
cls = cls.__from_class__
attribute_info = {}
for name, value in cls.__table__.columns.items():
attribute_info[name] = str(value.type).lower()
return {cls.__name__: attribute_info} | [
"def",
"meta",
"(",
"cls",
")",
":",
"if",
"getattr",
"(",
"cls",
",",
"'__from_class__'",
",",
"None",
")",
"is",
"not",
"None",
":",
"cls",
"=",
"cls",
".",
"__from_class__",
"attribute_info",
"=",
"{",
"}",
"for",
"name",
",",
"value",
"in",
"cls"... | Return a dictionary containing meta-information about the given
resource. | [
"Return",
"a",
"dictionary",
"containing",
"meta",
"-",
"information",
"about",
"the",
"given",
"resource",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/model/models.py#L176-L185 | train | 211,931 |
jeffknupp/sandman | sandman/sandmanctl.py | print_version | def print_version(ctx, value):
"""Print the current version of sandman and exit."""
if not value:
return
import pkg_resources
version = None
try:
version = pkg_resources.get_distribution('sandman').version
finally:
del pkg_resources
click.echo(version)
ctx.exit() | python | def print_version(ctx, value):
"""Print the current version of sandman and exit."""
if not value:
return
import pkg_resources
version = None
try:
version = pkg_resources.get_distribution('sandman').version
finally:
del pkg_resources
click.echo(version)
ctx.exit() | [
"def",
"print_version",
"(",
"ctx",
",",
"value",
")",
":",
"if",
"not",
"value",
":",
"return",
"import",
"pkg_resources",
"version",
"=",
"None",
"try",
":",
"version",
"=",
"pkg_resources",
".",
"get_distribution",
"(",
"'sandman'",
")",
".",
"version",
... | Print the current version of sandman and exit. | [
"Print",
"the",
"current",
"version",
"of",
"sandman",
"and",
"exit",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/sandmanctl.py#L9-L20 | train | 211,932 |
jeffknupp/sandman | sandman/sandman.py | _get_acceptable_response_type | def _get_acceptable_response_type():
"""Return the mimetype for this request."""
if ('Accept' not in request.headers or request.headers['Accept'] in
ALL_CONTENT_TYPES):
return JSON
acceptable_content_types = set(
request.headers['ACCEPT'].strip().split(','))
if acceptable_content_types & HTML_CONTENT_TYPES:
return HTML
elif acceptable_content_types & JSON_CONTENT_TYPES:
return JSON
else:
# HTTP 406 Not Acceptable
raise InvalidAPIUsage(406) | python | def _get_acceptable_response_type():
"""Return the mimetype for this request."""
if ('Accept' not in request.headers or request.headers['Accept'] in
ALL_CONTENT_TYPES):
return JSON
acceptable_content_types = set(
request.headers['ACCEPT'].strip().split(','))
if acceptable_content_types & HTML_CONTENT_TYPES:
return HTML
elif acceptable_content_types & JSON_CONTENT_TYPES:
return JSON
else:
# HTTP 406 Not Acceptable
raise InvalidAPIUsage(406) | [
"def",
"_get_acceptable_response_type",
"(",
")",
":",
"if",
"(",
"'Accept'",
"not",
"in",
"request",
".",
"headers",
"or",
"request",
".",
"headers",
"[",
"'Accept'",
"]",
"in",
"ALL_CONTENT_TYPES",
")",
":",
"return",
"JSON",
"acceptable_content_types",
"=",
... | Return the mimetype for this request. | [
"Return",
"the",
"mimetype",
"for",
"this",
"request",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/sandman.py#L41-L54 | train | 211,933 |
jeffknupp/sandman | sandman/sandman.py | handle_exception | def handle_exception(error):
"""Return a response with the appropriate status code, message, and content
type when an ``InvalidAPIUsage`` exception is raised."""
try:
if _get_acceptable_response_type() == JSON:
response = jsonify(error.to_dict())
response.status_code = error.code
return response
else:
return error.abort()
except InvalidAPIUsage:
# In addition to the original exception, we don't support the content
# type in the request's 'Accept' header, which is a more important
# error, so return that instead of what was originally raised.
response = jsonify(error.to_dict())
response.status_code = 415
return response | python | def handle_exception(error):
"""Return a response with the appropriate status code, message, and content
type when an ``InvalidAPIUsage`` exception is raised."""
try:
if _get_acceptable_response_type() == JSON:
response = jsonify(error.to_dict())
response.status_code = error.code
return response
else:
return error.abort()
except InvalidAPIUsage:
# In addition to the original exception, we don't support the content
# type in the request's 'Accept' header, which is a more important
# error, so return that instead of what was originally raised.
response = jsonify(error.to_dict())
response.status_code = 415
return response | [
"def",
"handle_exception",
"(",
"error",
")",
":",
"try",
":",
"if",
"_get_acceptable_response_type",
"(",
")",
"==",
"JSON",
":",
"response",
"=",
"jsonify",
"(",
"error",
".",
"to_dict",
"(",
")",
")",
"response",
".",
"status_code",
"=",
"error",
".",
... | Return a response with the appropriate status code, message, and content
type when an ``InvalidAPIUsage`` exception is raised. | [
"Return",
"a",
"response",
"with",
"the",
"appropriate",
"status",
"code",
"message",
"and",
"content",
"type",
"when",
"an",
"InvalidAPIUsage",
"exception",
"is",
"raised",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/sandman.py#L58-L74 | train | 211,934 |
jeffknupp/sandman | sandman/sandman.py | _single_attribute_html_response | def _single_attribute_html_response(resource, name, value):
"""Return the json representation of a single attribute of a resource.
:param :class:`sandman.model.Model` resource: resource for attribute
:param string name: name of the attribute
:param string value: string value of the attribute
:rtype: :class:`flask.Response`
"""
return make_response(render_template(
'attribute.html',
resource=resource,
name=name, value=value)) | python | def _single_attribute_html_response(resource, name, value):
"""Return the json representation of a single attribute of a resource.
:param :class:`sandman.model.Model` resource: resource for attribute
:param string name: name of the attribute
:param string value: string value of the attribute
:rtype: :class:`flask.Response`
"""
return make_response(render_template(
'attribute.html',
resource=resource,
name=name, value=value)) | [
"def",
"_single_attribute_html_response",
"(",
"resource",
",",
"name",
",",
"value",
")",
":",
"return",
"make_response",
"(",
"render_template",
"(",
"'attribute.html'",
",",
"resource",
"=",
"resource",
",",
"name",
"=",
"name",
",",
"value",
"=",
"value",
... | Return the json representation of a single attribute of a resource.
:param :class:`sandman.model.Model` resource: resource for attribute
:param string name: name of the attribute
:param string value: string value of the attribute
:rtype: :class:`flask.Response` | [
"Return",
"the",
"json",
"representation",
"of",
"a",
"single",
"attribute",
"of",
"a",
"resource",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/sandman.py#L106-L118 | train | 211,935 |
jeffknupp/sandman | sandman/sandman.py | put_resource | def put_resource(collection, key):
"""Replace the resource identified by the given key and return the
appropriate response.
:param string collection: a :class:`sandman.model.Model` endpoint
:rtype: :class:`flask.Response`
"""
resource = retrieve_resource(collection, key)
_validate(endpoint_class(collection), request.method, resource)
resource.replace(get_resource_data(request))
try:
_perform_database_action('add', resource)
except IntegrityError as exception:
raise InvalidAPIUsage(422, FORWARDED_EXCEPTION_MESSAGE.format(
exception))
return no_content_response() | python | def put_resource(collection, key):
"""Replace the resource identified by the given key and return the
appropriate response.
:param string collection: a :class:`sandman.model.Model` endpoint
:rtype: :class:`flask.Response`
"""
resource = retrieve_resource(collection, key)
_validate(endpoint_class(collection), request.method, resource)
resource.replace(get_resource_data(request))
try:
_perform_database_action('add', resource)
except IntegrityError as exception:
raise InvalidAPIUsage(422, FORWARDED_EXCEPTION_MESSAGE.format(
exception))
return no_content_response() | [
"def",
"put_resource",
"(",
"collection",
",",
"key",
")",
":",
"resource",
"=",
"retrieve_resource",
"(",
"collection",
",",
"key",
")",
"_validate",
"(",
"endpoint_class",
"(",
"collection",
")",
",",
"request",
".",
"method",
",",
"resource",
")",
"resour... | Replace the resource identified by the given key and return the
appropriate response.
:param string collection: a :class:`sandman.model.Model` endpoint
:rtype: :class:`flask.Response` | [
"Replace",
"the",
"resource",
"identified",
"by",
"the",
"given",
"key",
"and",
"return",
"the",
"appropriate",
"response",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/sandman.py#L416-L434 | train | 211,936 |
jeffknupp/sandman | sandman/sandman.py | index | def index():
"""Return information about each type of resource and how it can be
accessed."""
classes = []
with app.app_context():
classes = set(current_app.class_references.values())
if _get_acceptable_response_type() == JSON:
meta_data = {}
for cls in classes:
meta_data[cls.endpoint()] = {
'link': '/' + cls.endpoint(),
'meta': '/' + cls.endpoint() + '/meta'
}
return jsonify(meta_data)
else:
return render_template('index.html', classes=classes) | python | def index():
"""Return information about each type of resource and how it can be
accessed."""
classes = []
with app.app_context():
classes = set(current_app.class_references.values())
if _get_acceptable_response_type() == JSON:
meta_data = {}
for cls in classes:
meta_data[cls.endpoint()] = {
'link': '/' + cls.endpoint(),
'meta': '/' + cls.endpoint() + '/meta'
}
return jsonify(meta_data)
else:
return render_template('index.html', classes=classes) | [
"def",
"index",
"(",
")",
":",
"classes",
"=",
"[",
"]",
"with",
"app",
".",
"app_context",
"(",
")",
":",
"classes",
"=",
"set",
"(",
"current_app",
".",
"class_references",
".",
"values",
"(",
")",
")",
"if",
"_get_acceptable_response_type",
"(",
")",
... | Return information about each type of resource and how it can be
accessed. | [
"Return",
"information",
"about",
"each",
"type",
"of",
"resource",
"and",
"how",
"it",
"can",
"be",
"accessed",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/sandman.py#L544-L559 | train | 211,937 |
jeffknupp/sandman | sandman/sandman.py | get_meta | def get_meta(collection):
"""Return the meta-description of a given resource.
:param collection: The collection to get meta-info for
"""
cls = endpoint_class(collection)
description = cls.meta()
return jsonify(description) | python | def get_meta(collection):
"""Return the meta-description of a given resource.
:param collection: The collection to get meta-info for
"""
cls = endpoint_class(collection)
description = cls.meta()
return jsonify(description) | [
"def",
"get_meta",
"(",
"collection",
")",
":",
"cls",
"=",
"endpoint_class",
"(",
"collection",
")",
"description",
"=",
"cls",
".",
"meta",
"(",
")",
"return",
"jsonify",
"(",
"description",
")"
] | Return the meta-description of a given resource.
:param collection: The collection to get meta-info for | [
"Return",
"the",
"meta",
"-",
"description",
"of",
"a",
"given",
"resource",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/sandman.py#L564-L573 | train | 211,938 |
jeffknupp/sandman | sandman/exception.py | InvalidAPIUsage.abort | def abort(self):
"""Return an HTML Response representation of the exception."""
resp = make_response(render_template('error.html', error=self.code, message=self.message), self.code)
return resp | python | def abort(self):
"""Return an HTML Response representation of the exception."""
resp = make_response(render_template('error.html', error=self.code, message=self.message), self.code)
return resp | [
"def",
"abort",
"(",
"self",
")",
":",
"resp",
"=",
"make_response",
"(",
"render_template",
"(",
"'error.html'",
",",
"error",
"=",
"self",
".",
"code",
",",
"message",
"=",
"self",
".",
"message",
")",
",",
"self",
".",
"code",
")",
"return",
"resp"
... | Return an HTML Response representation of the exception. | [
"Return",
"an",
"HTML",
"Response",
"representation",
"of",
"the",
"exception",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/exception.py#L22-L25 | train | 211,939 |
jeffknupp/sandman | sandman/model/utils.py | generate_endpoint_classes | def generate_endpoint_classes(db, generate_pks=False):
"""Return a list of model classes generated for each reflected database
table."""
seen_classes = set()
for cls in current_app.class_references.values():
seen_classes.add(cls.__tablename__)
with app.app_context():
db.metadata.reflect(bind=db.engine)
for name, table in db.metadata.tables.items():
if not name in seen_classes:
seen_classes.add(name)
if not table.primary_key and generate_pks:
cls = add_pk_if_required(db, table, name)
else:
cls = type(
str(name),
(sandman_model, db.Model),
{'__tablename__': name})
register(cls) | python | def generate_endpoint_classes(db, generate_pks=False):
"""Return a list of model classes generated for each reflected database
table."""
seen_classes = set()
for cls in current_app.class_references.values():
seen_classes.add(cls.__tablename__)
with app.app_context():
db.metadata.reflect(bind=db.engine)
for name, table in db.metadata.tables.items():
if not name in seen_classes:
seen_classes.add(name)
if not table.primary_key and generate_pks:
cls = add_pk_if_required(db, table, name)
else:
cls = type(
str(name),
(sandman_model, db.Model),
{'__tablename__': name})
register(cls) | [
"def",
"generate_endpoint_classes",
"(",
"db",
",",
"generate_pks",
"=",
"False",
")",
":",
"seen_classes",
"=",
"set",
"(",
")",
"for",
"cls",
"in",
"current_app",
".",
"class_references",
".",
"values",
"(",
")",
":",
"seen_classes",
".",
"add",
"(",
"cl... | Return a list of model classes generated for each reflected database
table. | [
"Return",
"a",
"list",
"of",
"model",
"classes",
"generated",
"for",
"each",
"reflected",
"database",
"table",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/model/utils.py#L25-L43 | train | 211,940 |
jeffknupp/sandman | sandman/model/utils.py | add_pk_if_required | def add_pk_if_required(db, table, name):
"""Return a class deriving from our Model class as well as the SQLAlchemy
model.
:param `sqlalchemy.schema.Table` table: table to create primary key for
:param table: table to create primary key for
"""
db.metadata.reflect(bind=db.engine)
cls_dict = {'__tablename__': name}
if not table.primary_key:
for column in table.columns:
column.primary_key = True
Table(name, db.metadata, *table.columns, extend_existing=True)
cls_dict['__table__'] = table
db.metadata.create_all(bind=db.engine)
return type(str(name), (sandman_model, db.Model), cls_dict) | python | def add_pk_if_required(db, table, name):
"""Return a class deriving from our Model class as well as the SQLAlchemy
model.
:param `sqlalchemy.schema.Table` table: table to create primary key for
:param table: table to create primary key for
"""
db.metadata.reflect(bind=db.engine)
cls_dict = {'__tablename__': name}
if not table.primary_key:
for column in table.columns:
column.primary_key = True
Table(name, db.metadata, *table.columns, extend_existing=True)
cls_dict['__table__'] = table
db.metadata.create_all(bind=db.engine)
return type(str(name), (sandman_model, db.Model), cls_dict) | [
"def",
"add_pk_if_required",
"(",
"db",
",",
"table",
",",
"name",
")",
":",
"db",
".",
"metadata",
".",
"reflect",
"(",
"bind",
"=",
"db",
".",
"engine",
")",
"cls_dict",
"=",
"{",
"'__tablename__'",
":",
"name",
"}",
"if",
"not",
"table",
".",
"pri... | Return a class deriving from our Model class as well as the SQLAlchemy
model.
:param `sqlalchemy.schema.Table` table: table to create primary key for
:param table: table to create primary key for | [
"Return",
"a",
"class",
"deriving",
"from",
"our",
"Model",
"class",
"as",
"well",
"as",
"the",
"SQLAlchemy",
"model",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/model/utils.py#L46-L63 | train | 211,941 |
jeffknupp/sandman | sandman/model/utils.py | prepare_relationships | def prepare_relationships(db, known_tables):
"""Enrich the registered Models with SQLAlchemy ``relationships``
so that related tables are correctly processed up by the admin.
"""
inspector = reflection.Inspector.from_engine(db.engine)
for cls in set(known_tables.values()):
for foreign_key in inspector.get_foreign_keys(cls.__tablename__):
if foreign_key['referred_table'] in known_tables:
other = known_tables[foreign_key['referred_table']]
constrained_column = foreign_key['constrained_columns']
if other not in cls.__related_tables__ and cls not in (
other.__related_tables__) and other != cls:
cls.__related_tables__.add(other)
# Add a SQLAlchemy relationship as an attribute
# on the class
setattr(cls, other.__table__.name, relationship(
other.__name__, backref=db.backref(
cls.__name__.lower()),
foreign_keys=str(cls.__name__) + '.' +
''.join(constrained_column))) | python | def prepare_relationships(db, known_tables):
"""Enrich the registered Models with SQLAlchemy ``relationships``
so that related tables are correctly processed up by the admin.
"""
inspector = reflection.Inspector.from_engine(db.engine)
for cls in set(known_tables.values()):
for foreign_key in inspector.get_foreign_keys(cls.__tablename__):
if foreign_key['referred_table'] in known_tables:
other = known_tables[foreign_key['referred_table']]
constrained_column = foreign_key['constrained_columns']
if other not in cls.__related_tables__ and cls not in (
other.__related_tables__) and other != cls:
cls.__related_tables__.add(other)
# Add a SQLAlchemy relationship as an attribute
# on the class
setattr(cls, other.__table__.name, relationship(
other.__name__, backref=db.backref(
cls.__name__.lower()),
foreign_keys=str(cls.__name__) + '.' +
''.join(constrained_column))) | [
"def",
"prepare_relationships",
"(",
"db",
",",
"known_tables",
")",
":",
"inspector",
"=",
"reflection",
".",
"Inspector",
".",
"from_engine",
"(",
"db",
".",
"engine",
")",
"for",
"cls",
"in",
"set",
"(",
"known_tables",
".",
"values",
"(",
")",
")",
"... | Enrich the registered Models with SQLAlchemy ``relationships``
so that related tables are correctly processed up by the admin. | [
"Enrich",
"the",
"registered",
"Models",
"with",
"SQLAlchemy",
"relationships",
"so",
"that",
"related",
"tables",
"are",
"correctly",
"processed",
"up",
"by",
"the",
"admin",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/model/utils.py#L66-L86 | train | 211,942 |
jeffknupp/sandman | sandman/model/utils.py | register_classes_for_admin | def register_classes_for_admin(db_session, show_pks=True, name='admin'):
"""Registers classes for the Admin view that ultimately creates the admin
interface.
:param db_session: handle to database session
:param list classes: list of classes to register with the admin
:param bool show_pks: show primary key columns in the admin?
"""
with app.app_context():
admin_view = Admin(current_app, name=name)
for cls in set(
cls for cls in current_app.class_references.values() if
cls.use_admin):
column_list = [column.name for column in
cls.__table__.columns.values()]
if hasattr(cls, '__view__'):
# allow ability for model classes to specify model views
admin_view_class = type(
'AdminView',
(cls.__view__,),
{'form_columns': column_list})
elif show_pks:
# the default of Flask-SQLAlchemy is to not show primary
# classes, which obviously isn't acceptable in some cases
admin_view_class = type(
'AdminView',
(AdminModelViewWithPK,),
{'form_columns': column_list})
else:
admin_view_class = ModelView
admin_view.add_view(admin_view_class(cls, db_session)) | python | def register_classes_for_admin(db_session, show_pks=True, name='admin'):
"""Registers classes for the Admin view that ultimately creates the admin
interface.
:param db_session: handle to database session
:param list classes: list of classes to register with the admin
:param bool show_pks: show primary key columns in the admin?
"""
with app.app_context():
admin_view = Admin(current_app, name=name)
for cls in set(
cls for cls in current_app.class_references.values() if
cls.use_admin):
column_list = [column.name for column in
cls.__table__.columns.values()]
if hasattr(cls, '__view__'):
# allow ability for model classes to specify model views
admin_view_class = type(
'AdminView',
(cls.__view__,),
{'form_columns': column_list})
elif show_pks:
# the default of Flask-SQLAlchemy is to not show primary
# classes, which obviously isn't acceptable in some cases
admin_view_class = type(
'AdminView',
(AdminModelViewWithPK,),
{'form_columns': column_list})
else:
admin_view_class = ModelView
admin_view.add_view(admin_view_class(cls, db_session)) | [
"def",
"register_classes_for_admin",
"(",
"db_session",
",",
"show_pks",
"=",
"True",
",",
"name",
"=",
"'admin'",
")",
":",
"with",
"app",
".",
"app_context",
"(",
")",
":",
"admin_view",
"=",
"Admin",
"(",
"current_app",
",",
"name",
"=",
"name",
")",
... | Registers classes for the Admin view that ultimately creates the admin
interface.
:param db_session: handle to database session
:param list classes: list of classes to register with the admin
:param bool show_pks: show primary key columns in the admin? | [
"Registers",
"classes",
"for",
"the",
"Admin",
"view",
"that",
"ultimately",
"creates",
"the",
"admin",
"interface",
"."
] | 253ea4d15cbccd9f0016d66fedd7478614cc0b2f | https://github.com/jeffknupp/sandman/blob/253ea4d15cbccd9f0016d66fedd7478614cc0b2f/sandman/model/utils.py#L128-L160 | train | 211,943 |
Bachmann1234/diff-cover | diff_cover/violationsreporters/violations_reporter.py | XmlCoverageReporter._get_classes | def _get_classes(self, xml_document, src_path):
"""
Given a path and parsed xml_document provides class nodes
with the relevant lines
First, we look to see if xml_document contains a source
node providing paths to search for
If we don't have that we check each nodes filename attribute
matches an absolute path
Finally, if we found no nodes, we check the filename attribute
for the relative path
"""
# Remove git_root from src_path for searching the correct filename
# If cwd is `/home/user/work/diff-cover/diff_cover`
# and src_path is `diff_cover/violations_reporter.py`
# search for `violations_reporter.py`
src_rel_path = self._to_unix_path(GitPathTool.relative_path(src_path))
# If cwd is `/home/user/work/diff-cover/diff_cover`
# and src_path is `other_package/some_file.py`
# search for `/home/user/work/diff-cover/other_package/some_file.py`
src_abs_path = self._to_unix_path(GitPathTool.absolute_path(src_path))
# cobertura sometimes provides the sources for the measurements
# within it. If we have that we outta use it
sources = xml_document.findall('sources/source')
sources = [source.text for source in sources if source.text]
classes = [class_tree
for class_tree in xml_document.findall(".//class")
or []]
classes = (
[clazz for clazz in classes if
src_abs_path in [
self._to_unix_path(
os.path.join(
source.strip(),
clazz.get('filename')
)
) for source in sources]]
or
[clazz for clazz in classes if
self._to_unix_path(clazz.get('filename')) == src_abs_path]
or
[clazz for clazz in classes if
self._to_unix_path(clazz.get('filename')) == src_rel_path]
)
return classes | python | def _get_classes(self, xml_document, src_path):
"""
Given a path and parsed xml_document provides class nodes
with the relevant lines
First, we look to see if xml_document contains a source
node providing paths to search for
If we don't have that we check each nodes filename attribute
matches an absolute path
Finally, if we found no nodes, we check the filename attribute
for the relative path
"""
# Remove git_root from src_path for searching the correct filename
# If cwd is `/home/user/work/diff-cover/diff_cover`
# and src_path is `diff_cover/violations_reporter.py`
# search for `violations_reporter.py`
src_rel_path = self._to_unix_path(GitPathTool.relative_path(src_path))
# If cwd is `/home/user/work/diff-cover/diff_cover`
# and src_path is `other_package/some_file.py`
# search for `/home/user/work/diff-cover/other_package/some_file.py`
src_abs_path = self._to_unix_path(GitPathTool.absolute_path(src_path))
# cobertura sometimes provides the sources for the measurements
# within it. If we have that we outta use it
sources = xml_document.findall('sources/source')
sources = [source.text for source in sources if source.text]
classes = [class_tree
for class_tree in xml_document.findall(".//class")
or []]
classes = (
[clazz for clazz in classes if
src_abs_path in [
self._to_unix_path(
os.path.join(
source.strip(),
clazz.get('filename')
)
) for source in sources]]
or
[clazz for clazz in classes if
self._to_unix_path(clazz.get('filename')) == src_abs_path]
or
[clazz for clazz in classes if
self._to_unix_path(clazz.get('filename')) == src_rel_path]
)
return classes | [
"def",
"_get_classes",
"(",
"self",
",",
"xml_document",
",",
"src_path",
")",
":",
"# Remove git_root from src_path for searching the correct filename",
"# If cwd is `/home/user/work/diff-cover/diff_cover`",
"# and src_path is `diff_cover/violations_reporter.py`",
"# search for `violation... | Given a path and parsed xml_document provides class nodes
with the relevant lines
First, we look to see if xml_document contains a source
node providing paths to search for
If we don't have that we check each nodes filename attribute
matches an absolute path
Finally, if we found no nodes, we check the filename attribute
for the relative path | [
"Given",
"a",
"path",
"and",
"parsed",
"xml_document",
"provides",
"class",
"nodes",
"with",
"the",
"relevant",
"lines"
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/violationsreporters/violations_reporter.py#L51-L100 | train | 211,944 |
Bachmann1234/diff-cover | diff_cover/violationsreporters/violations_reporter.py | XmlCoverageReporter._cache_file | def _cache_file(self, src_path):
"""
Load the data from `self._xml_roots`
for `src_path`, if it hasn't been already.
"""
# If we have not yet loaded this source file
if src_path not in self._info_cache:
# We only want to keep violations that show up in each xml source.
# Thus, each time, we take the intersection. However, to do this
# we must treat the first time as a special case and just add all
# the violations from the first xml report.
violations = None
# A line is measured if it is measured in any of the reports, so
# we take set union each time and can just start with the empty set
measured = set()
# Loop through the files that contain the xml roots
for xml_document in self._xml_roots:
if xml_document.findall('.[@clover]'):
# see etc/schema/clover.xsd at https://bitbucket.org/atlassian/clover/src
line_nodes = self._get_src_path_line_nodes_clover(xml_document, src_path)
_number = 'num'
_hits = 'count'
elif xml_document.findall('.[@name]'):
# https://github.com/jacoco/jacoco/blob/master/org.jacoco.report/src/org/jacoco/report/xml/report.dtd
line_nodes = self._get_src_path_line_nodes_jacoco(xml_document, src_path)
_number = 'nr'
_hits = 'ci'
else:
# https://github.com/cobertura/web/blob/master/htdocs/xml/coverage-04.dtd
line_nodes = self._get_src_path_line_nodes_cobertura(xml_document, src_path)
_number = 'number'
_hits = 'hits'
if line_nodes is None:
continue
# First case, need to define violations initially
if violations is None:
violations = set(
Violation(int(line.get(_number)), None)
for line in line_nodes
if int(line.get(_hits, 0)) == 0)
# If we already have a violations set,
# take the intersection of the new
# violations set and its old self
else:
violations = violations & set(
Violation(int(line.get(_number)), None)
for line in line_nodes
if int(line.get(_hits, 0)) == 0
)
# Measured is the union of itself and the new measured
measured = measured | set(
int(line.get(_number)) for line in line_nodes
)
# If we don't have any information about the source file,
# don't report any violations
if violations is None:
violations = set()
self._info_cache[src_path] = (violations, measured) | python | def _cache_file(self, src_path):
"""
Load the data from `self._xml_roots`
for `src_path`, if it hasn't been already.
"""
# If we have not yet loaded this source file
if src_path not in self._info_cache:
# We only want to keep violations that show up in each xml source.
# Thus, each time, we take the intersection. However, to do this
# we must treat the first time as a special case and just add all
# the violations from the first xml report.
violations = None
# A line is measured if it is measured in any of the reports, so
# we take set union each time and can just start with the empty set
measured = set()
# Loop through the files that contain the xml roots
for xml_document in self._xml_roots:
if xml_document.findall('.[@clover]'):
# see etc/schema/clover.xsd at https://bitbucket.org/atlassian/clover/src
line_nodes = self._get_src_path_line_nodes_clover(xml_document, src_path)
_number = 'num'
_hits = 'count'
elif xml_document.findall('.[@name]'):
# https://github.com/jacoco/jacoco/blob/master/org.jacoco.report/src/org/jacoco/report/xml/report.dtd
line_nodes = self._get_src_path_line_nodes_jacoco(xml_document, src_path)
_number = 'nr'
_hits = 'ci'
else:
# https://github.com/cobertura/web/blob/master/htdocs/xml/coverage-04.dtd
line_nodes = self._get_src_path_line_nodes_cobertura(xml_document, src_path)
_number = 'number'
_hits = 'hits'
if line_nodes is None:
continue
# First case, need to define violations initially
if violations is None:
violations = set(
Violation(int(line.get(_number)), None)
for line in line_nodes
if int(line.get(_hits, 0)) == 0)
# If we already have a violations set,
# take the intersection of the new
# violations set and its old self
else:
violations = violations & set(
Violation(int(line.get(_number)), None)
for line in line_nodes
if int(line.get(_hits, 0)) == 0
)
# Measured is the union of itself and the new measured
measured = measured | set(
int(line.get(_number)) for line in line_nodes
)
# If we don't have any information about the source file,
# don't report any violations
if violations is None:
violations = set()
self._info_cache[src_path] = (violations, measured) | [
"def",
"_cache_file",
"(",
"self",
",",
"src_path",
")",
":",
"# If we have not yet loaded this source file",
"if",
"src_path",
"not",
"in",
"self",
".",
"_info_cache",
":",
"# We only want to keep violations that show up in each xml source.",
"# Thus, each time, we take the inte... | Load the data from `self._xml_roots`
for `src_path`, if it hasn't been already. | [
"Load",
"the",
"data",
"from",
"self",
".",
"_xml_roots",
"for",
"src_path",
"if",
"it",
"hasn",
"t",
"been",
"already",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/violationsreporters/violations_reporter.py#L159-L223 | train | 211,945 |
Bachmann1234/diff-cover | diff_cover/violationsreporters/violations_reporter.py | PylintDriver._process_dupe_code_violation | def _process_dupe_code_violation(self, lines, current_line, message):
"""
The duplicate code violation is a multi line error. This pulls out
all the relevant files
"""
src_paths = []
message_match = self.dupe_code_violation_regex.match(message)
if message_match:
for _ in range(int(message_match.group(1))):
current_line += 1
match = self.multi_line_violation_regex.match(
lines[current_line]
)
src_path, l_number = match.groups()
src_paths.append(('%s.py' % src_path, l_number))
return src_paths | python | def _process_dupe_code_violation(self, lines, current_line, message):
"""
The duplicate code violation is a multi line error. This pulls out
all the relevant files
"""
src_paths = []
message_match = self.dupe_code_violation_regex.match(message)
if message_match:
for _ in range(int(message_match.group(1))):
current_line += 1
match = self.multi_line_violation_regex.match(
lines[current_line]
)
src_path, l_number = match.groups()
src_paths.append(('%s.py' % src_path, l_number))
return src_paths | [
"def",
"_process_dupe_code_violation",
"(",
"self",
",",
"lines",
",",
"current_line",
",",
"message",
")",
":",
"src_paths",
"=",
"[",
"]",
"message_match",
"=",
"self",
".",
"dupe_code_violation_regex",
".",
"match",
"(",
"message",
")",
"if",
"message_match",... | The duplicate code violation is a multi line error. This pulls out
all the relevant files | [
"The",
"duplicate",
"code",
"violation",
"is",
"a",
"multi",
"line",
"error",
".",
"This",
"pulls",
"out",
"all",
"the",
"relevant",
"files"
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/violationsreporters/violations_reporter.py#L364-L379 | train | 211,946 |
Bachmann1234/diff-cover | diff_cover/git_path.py | GitPathTool.set_cwd | def set_cwd(cls, cwd):
"""
Set the cwd that is used to manipulate paths.
"""
if not cwd:
try:
cwd = os.getcwdu()
except AttributeError:
cwd = os.getcwd()
if isinstance(cwd, six.binary_type):
cwd = cwd.decode(sys.getdefaultencoding())
cls._cwd = cwd
cls._root = cls._git_root() | python | def set_cwd(cls, cwd):
"""
Set the cwd that is used to manipulate paths.
"""
if not cwd:
try:
cwd = os.getcwdu()
except AttributeError:
cwd = os.getcwd()
if isinstance(cwd, six.binary_type):
cwd = cwd.decode(sys.getdefaultencoding())
cls._cwd = cwd
cls._root = cls._git_root() | [
"def",
"set_cwd",
"(",
"cls",
",",
"cwd",
")",
":",
"if",
"not",
"cwd",
":",
"try",
":",
"cwd",
"=",
"os",
".",
"getcwdu",
"(",
")",
"except",
"AttributeError",
":",
"cwd",
"=",
"os",
".",
"getcwd",
"(",
")",
"if",
"isinstance",
"(",
"cwd",
",",
... | Set the cwd that is used to manipulate paths. | [
"Set",
"the",
"cwd",
"that",
"is",
"used",
"to",
"manipulate",
"paths",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/git_path.py#L23-L35 | train | 211,947 |
Bachmann1234/diff-cover | diff_cover/git_path.py | GitPathTool.relative_path | def relative_path(cls, git_diff_path):
"""
Returns git_diff_path relative to cwd.
"""
# Remove git_root from src_path for searching the correct filename
# If cwd is `/home/user/work/diff-cover/diff_cover`
# and src_path is `diff_cover/violations_reporter.py`
# search for `violations_reporter.py`
root_rel_path = os.path.relpath(cls._cwd, cls._root)
rel_path = os.path.relpath(git_diff_path, root_rel_path)
return rel_path | python | def relative_path(cls, git_diff_path):
"""
Returns git_diff_path relative to cwd.
"""
# Remove git_root from src_path for searching the correct filename
# If cwd is `/home/user/work/diff-cover/diff_cover`
# and src_path is `diff_cover/violations_reporter.py`
# search for `violations_reporter.py`
root_rel_path = os.path.relpath(cls._cwd, cls._root)
rel_path = os.path.relpath(git_diff_path, root_rel_path)
return rel_path | [
"def",
"relative_path",
"(",
"cls",
",",
"git_diff_path",
")",
":",
"# Remove git_root from src_path for searching the correct filename",
"# If cwd is `/home/user/work/diff-cover/diff_cover`",
"# and src_path is `diff_cover/violations_reporter.py`",
"# search for `violations_reporter.py`",
"... | Returns git_diff_path relative to cwd. | [
"Returns",
"git_diff_path",
"relative",
"to",
"cwd",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/git_path.py#L38-L49 | train | 211,948 |
Bachmann1234/diff-cover | diff_cover/diff_reporter.py | BaseDiffReporter._is_path_excluded | def _is_path_excluded(self, path):
"""
Check if a path is excluded.
:param str path:
Path to check against the exclude patterns.
:returns:
True if there are exclude patterns and the path matches,
otherwise False.
"""
exclude = self._exclude
if not exclude:
return False
basename = os.path.basename(path)
if self._fnmatch(basename, exclude):
return True
absolute_path = os.path.abspath(path)
match = self._fnmatch(absolute_path, exclude)
return match | python | def _is_path_excluded(self, path):
"""
Check if a path is excluded.
:param str path:
Path to check against the exclude patterns.
:returns:
True if there are exclude patterns and the path matches,
otherwise False.
"""
exclude = self._exclude
if not exclude:
return False
basename = os.path.basename(path)
if self._fnmatch(basename, exclude):
return True
absolute_path = os.path.abspath(path)
match = self._fnmatch(absolute_path, exclude)
return match | [
"def",
"_is_path_excluded",
"(",
"self",
",",
"path",
")",
":",
"exclude",
"=",
"self",
".",
"_exclude",
"if",
"not",
"exclude",
":",
"return",
"False",
"basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"path",
")",
"if",
"self",
".",
"_fnmatch... | Check if a path is excluded.
:param str path:
Path to check against the exclude patterns.
:returns:
True if there are exclude patterns and the path matches,
otherwise False. | [
"Check",
"if",
"a",
"path",
"is",
"excluded",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/diff_reporter.py#L72-L91 | train | 211,949 |
Bachmann1234/diff-cover | diff_cover/diff_reporter.py | GitDiffReporter.src_paths_changed | def src_paths_changed(self):
"""
See base class docstring.
"""
# Get the diff dictionary
diff_dict = self._git_diff()
# Return the changed file paths (dict keys)
# in alphabetical order
return sorted(diff_dict.keys(), key=lambda x: x.lower()) | python | def src_paths_changed(self):
"""
See base class docstring.
"""
# Get the diff dictionary
diff_dict = self._git_diff()
# Return the changed file paths (dict keys)
# in alphabetical order
return sorted(diff_dict.keys(), key=lambda x: x.lower()) | [
"def",
"src_paths_changed",
"(",
"self",
")",
":",
"# Get the diff dictionary",
"diff_dict",
"=",
"self",
".",
"_git_diff",
"(",
")",
"# Return the changed file paths (dict keys)",
"# in alphabetical order",
"return",
"sorted",
"(",
"diff_dict",
".",
"keys",
"(",
")",
... | See base class docstring. | [
"See",
"base",
"class",
"docstring",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/diff_reporter.py#L141-L151 | train | 211,950 |
Bachmann1234/diff-cover | diff_cover/diff_reporter.py | GitDiffReporter._get_included_diff_results | def _get_included_diff_results(self):
"""
Return a list of stages to be included in the diff results.
"""
included = [self._git_diff_tool.diff_committed(self._compare_branch)]
if not self._ignore_staged:
included.append(self._git_diff_tool.diff_staged())
if not self._ignore_unstaged:
included.append(self._git_diff_tool.diff_unstaged())
return included | python | def _get_included_diff_results(self):
"""
Return a list of stages to be included in the diff results.
"""
included = [self._git_diff_tool.diff_committed(self._compare_branch)]
if not self._ignore_staged:
included.append(self._git_diff_tool.diff_staged())
if not self._ignore_unstaged:
included.append(self._git_diff_tool.diff_unstaged())
return included | [
"def",
"_get_included_diff_results",
"(",
"self",
")",
":",
"included",
"=",
"[",
"self",
".",
"_git_diff_tool",
".",
"diff_committed",
"(",
"self",
".",
"_compare_branch",
")",
"]",
"if",
"not",
"self",
".",
"_ignore_staged",
":",
"included",
".",
"append",
... | Return a list of stages to be included in the diff results. | [
"Return",
"a",
"list",
"of",
"stages",
"to",
"be",
"included",
"in",
"the",
"diff",
"results",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/diff_reporter.py#L165-L175 | train | 211,951 |
Bachmann1234/diff-cover | diff_cover/diff_reporter.py | GitDiffReporter._git_diff | def _git_diff(self):
"""
Run `git diff` and returns a dict in which the keys
are changed file paths and the values are lists of
line numbers.
Guarantees that each line number within a file
is unique (no repeats) and in ascending order.
Returns a cached result if called multiple times.
Raises a GitDiffError if `git diff` has an error.
"""
# If we do not have a cached result, execute `git diff`
if self._diff_dict is None:
result_dict = dict()
for diff_str in self._get_included_diff_results():
# Parse the output of the diff string
diff_dict = self._parse_diff_str(diff_str)
for src_path in diff_dict.keys():
if self._is_path_excluded(src_path):
continue
# If no _supported_extensions provided, or extension present: process
root, extension = os.path.splitext(src_path)
extension = extension[1:].lower()
# 'not self._supported_extensions' tests for both None and empty list []
if not self._supported_extensions or extension in self._supported_extensions:
added_lines, deleted_lines = diff_dict[src_path]
# Remove any lines from the dict that have been deleted
# Include any lines that have been added
result_dict[src_path] = [
line for line in result_dict.get(src_path, [])
if not line in deleted_lines
] + added_lines
# Eliminate repeats and order line numbers
for (src_path, lines) in result_dict.items():
result_dict[src_path] = self._unique_ordered_lines(lines)
# Store the resulting dict
self._diff_dict = result_dict
# Return the diff cache
return self._diff_dict | python | def _git_diff(self):
"""
Run `git diff` and returns a dict in which the keys
are changed file paths and the values are lists of
line numbers.
Guarantees that each line number within a file
is unique (no repeats) and in ascending order.
Returns a cached result if called multiple times.
Raises a GitDiffError if `git diff` has an error.
"""
# If we do not have a cached result, execute `git diff`
if self._diff_dict is None:
result_dict = dict()
for diff_str in self._get_included_diff_results():
# Parse the output of the diff string
diff_dict = self._parse_diff_str(diff_str)
for src_path in diff_dict.keys():
if self._is_path_excluded(src_path):
continue
# If no _supported_extensions provided, or extension present: process
root, extension = os.path.splitext(src_path)
extension = extension[1:].lower()
# 'not self._supported_extensions' tests for both None and empty list []
if not self._supported_extensions or extension in self._supported_extensions:
added_lines, deleted_lines = diff_dict[src_path]
# Remove any lines from the dict that have been deleted
# Include any lines that have been added
result_dict[src_path] = [
line for line in result_dict.get(src_path, [])
if not line in deleted_lines
] + added_lines
# Eliminate repeats and order line numbers
for (src_path, lines) in result_dict.items():
result_dict[src_path] = self._unique_ordered_lines(lines)
# Store the resulting dict
self._diff_dict = result_dict
# Return the diff cache
return self._diff_dict | [
"def",
"_git_diff",
"(",
"self",
")",
":",
"# If we do not have a cached result, execute `git diff`",
"if",
"self",
".",
"_diff_dict",
"is",
"None",
":",
"result_dict",
"=",
"dict",
"(",
")",
"for",
"diff_str",
"in",
"self",
".",
"_get_included_diff_results",
"(",
... | Run `git diff` and returns a dict in which the keys
are changed file paths and the values are lists of
line numbers.
Guarantees that each line number within a file
is unique (no repeats) and in ascending order.
Returns a cached result if called multiple times.
Raises a GitDiffError if `git diff` has an error. | [
"Run",
"git",
"diff",
"and",
"returns",
"a",
"dict",
"in",
"which",
"the",
"keys",
"are",
"changed",
"file",
"paths",
"and",
"the",
"values",
"are",
"lists",
"of",
"line",
"numbers",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/diff_reporter.py#L177-L225 | train | 211,952 |
Bachmann1234/diff-cover | diff_cover/diff_reporter.py | GitDiffReporter._parse_source_sections | def _parse_source_sections(self, diff_str):
"""
Given the output of `git diff`, return a dictionary
with keys that are source file paths.
Each value is a list of lines from the `git diff` output
related to the source file.
Raises a `GitDiffError` if `diff_str` is in an invalid format.
"""
# Create a dict to map source files to lines in the diff output
source_dict = dict()
# Keep track of the current source file
src_path = None
# Signal that we've found a hunk (after starting a source file)
found_hunk = False
# Parse the diff string into sections by source file
for line in diff_str.split('\n'):
# If the line starts with "diff --git"
# or "diff --cc" (in the case of a merge conflict)
# then it is the start of a new source file
if line.startswith('diff --git') or line.startswith('diff --cc'):
# Retrieve the name of the source file
src_path = self._parse_source_line(line)
# Create an entry for the source file, if we don't
# already have one.
if src_path not in source_dict:
source_dict[src_path] = []
# Signal that we're waiting for a hunk for this source file
found_hunk = False
# Every other line is stored in the dictionary for this source file
# once we find a hunk section
else:
# Only add lines if we're in a hunk section
# (ignore index and files changed lines)
if found_hunk or line.startswith('@@'):
# Remember that we found a hunk
found_hunk = True
if src_path is not None:
source_dict[src_path].append(line)
else:
# We tolerate other information before we have
# a source file defined, unless it's a hunk line
if line.startswith("@@"):
msg = "Hunk has no source file: '{}'".format(line)
raise GitDiffError(msg)
return source_dict | python | def _parse_source_sections(self, diff_str):
"""
Given the output of `git diff`, return a dictionary
with keys that are source file paths.
Each value is a list of lines from the `git diff` output
related to the source file.
Raises a `GitDiffError` if `diff_str` is in an invalid format.
"""
# Create a dict to map source files to lines in the diff output
source_dict = dict()
# Keep track of the current source file
src_path = None
# Signal that we've found a hunk (after starting a source file)
found_hunk = False
# Parse the diff string into sections by source file
for line in diff_str.split('\n'):
# If the line starts with "diff --git"
# or "diff --cc" (in the case of a merge conflict)
# then it is the start of a new source file
if line.startswith('diff --git') or line.startswith('diff --cc'):
# Retrieve the name of the source file
src_path = self._parse_source_line(line)
# Create an entry for the source file, if we don't
# already have one.
if src_path not in source_dict:
source_dict[src_path] = []
# Signal that we're waiting for a hunk for this source file
found_hunk = False
# Every other line is stored in the dictionary for this source file
# once we find a hunk section
else:
# Only add lines if we're in a hunk section
# (ignore index and files changed lines)
if found_hunk or line.startswith('@@'):
# Remember that we found a hunk
found_hunk = True
if src_path is not None:
source_dict[src_path].append(line)
else:
# We tolerate other information before we have
# a source file defined, unless it's a hunk line
if line.startswith("@@"):
msg = "Hunk has no source file: '{}'".format(line)
raise GitDiffError(msg)
return source_dict | [
"def",
"_parse_source_sections",
"(",
"self",
",",
"diff_str",
")",
":",
"# Create a dict to map source files to lines in the diff output",
"source_dict",
"=",
"dict",
"(",
")",
"# Keep track of the current source file",
"src_path",
"=",
"None",
"# Signal that we've found a hunk ... | Given the output of `git diff`, return a dictionary
with keys that are source file paths.
Each value is a list of lines from the `git diff` output
related to the source file.
Raises a `GitDiffError` if `diff_str` is in an invalid format. | [
"Given",
"the",
"output",
"of",
"git",
"diff",
"return",
"a",
"dictionary",
"with",
"keys",
"that",
"are",
"source",
"file",
"paths",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/diff_reporter.py#L257-L317 | train | 211,953 |
Bachmann1234/diff-cover | diff_cover/diff_reporter.py | GitDiffReporter._parse_source_line | def _parse_source_line(self, line):
"""
Given a source line in `git diff` output, return the path
to the source file.
"""
if '--git' in line:
regex = self.SRC_FILE_RE
elif '--cc' in line:
regex = self.MERGE_CONFLICT_RE
else:
msg = "Do not recognize format of source in line '{}'".format(line)
raise GitDiffError(msg)
# Parse for the source file path
groups = regex.findall(line)
if len(groups) == 1:
return groups[0]
else:
msg = "Could not parse source path in line '{}'".format(line)
raise GitDiffError(msg) | python | def _parse_source_line(self, line):
"""
Given a source line in `git diff` output, return the path
to the source file.
"""
if '--git' in line:
regex = self.SRC_FILE_RE
elif '--cc' in line:
regex = self.MERGE_CONFLICT_RE
else:
msg = "Do not recognize format of source in line '{}'".format(line)
raise GitDiffError(msg)
# Parse for the source file path
groups = regex.findall(line)
if len(groups) == 1:
return groups[0]
else:
msg = "Could not parse source path in line '{}'".format(line)
raise GitDiffError(msg) | [
"def",
"_parse_source_line",
"(",
"self",
",",
"line",
")",
":",
"if",
"'--git'",
"in",
"line",
":",
"regex",
"=",
"self",
".",
"SRC_FILE_RE",
"elif",
"'--cc'",
"in",
"line",
":",
"regex",
"=",
"self",
".",
"MERGE_CONFLICT_RE",
"else",
":",
"msg",
"=",
... | Given a source line in `git diff` output, return the path
to the source file. | [
"Given",
"a",
"source",
"line",
"in",
"git",
"diff",
"output",
"return",
"the",
"path",
"to",
"the",
"source",
"file",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/diff_reporter.py#L389-L410 | train | 211,954 |
Bachmann1234/diff-cover | diff_cover/diff_reporter.py | GitDiffReporter._parse_hunk_line | def _parse_hunk_line(self, line):
"""
Given a hunk line in `git diff` output, return the line number
at the start of the hunk. A hunk is a segment of code that
contains changes.
The format of the hunk line is:
@@ -k,l +n,m @@ TEXT
where `k,l` represent the start line and length before the changes
and `n,m` represent the start line and length after the changes.
`git diff` will sometimes put a code excerpt from within the hunk
in the `TEXT` section of the line.
"""
# Split the line at the @@ terminators (start and end of the line)
components = line.split('@@')
# The first component should be an empty string, because
# the line starts with '@@'. The second component should
# be the hunk information, and any additional components
# are excerpts from the code.
if len(components) >= 2:
hunk_info = components[1]
groups = self.HUNK_LINE_RE.findall(hunk_info)
if len(groups) == 1:
try:
return int(groups[0])
except ValueError:
msg = "Could not parse '{}' as a line number".format(groups[0])
raise GitDiffError(msg)
else:
msg = "Could not find start of hunk in line '{}'".format(line)
raise GitDiffError(msg)
else:
msg = "Could not parse hunk in line '{}'".format(line)
raise GitDiffError(msg) | python | def _parse_hunk_line(self, line):
"""
Given a hunk line in `git diff` output, return the line number
at the start of the hunk. A hunk is a segment of code that
contains changes.
The format of the hunk line is:
@@ -k,l +n,m @@ TEXT
where `k,l` represent the start line and length before the changes
and `n,m` represent the start line and length after the changes.
`git diff` will sometimes put a code excerpt from within the hunk
in the `TEXT` section of the line.
"""
# Split the line at the @@ terminators (start and end of the line)
components = line.split('@@')
# The first component should be an empty string, because
# the line starts with '@@'. The second component should
# be the hunk information, and any additional components
# are excerpts from the code.
if len(components) >= 2:
hunk_info = components[1]
groups = self.HUNK_LINE_RE.findall(hunk_info)
if len(groups) == 1:
try:
return int(groups[0])
except ValueError:
msg = "Could not parse '{}' as a line number".format(groups[0])
raise GitDiffError(msg)
else:
msg = "Could not find start of hunk in line '{}'".format(line)
raise GitDiffError(msg)
else:
msg = "Could not parse hunk in line '{}'".format(line)
raise GitDiffError(msg) | [
"def",
"_parse_hunk_line",
"(",
"self",
",",
"line",
")",
":",
"# Split the line at the @@ terminators (start and end of the line)",
"components",
"=",
"line",
".",
"split",
"(",
"'@@'",
")",
"# The first component should be an empty string, because",
"# the line starts with '@@'... | Given a hunk line in `git diff` output, return the line number
at the start of the hunk. A hunk is a segment of code that
contains changes.
The format of the hunk line is:
@@ -k,l +n,m @@ TEXT
where `k,l` represent the start line and length before the changes
and `n,m` represent the start line and length after the changes.
`git diff` will sometimes put a code excerpt from within the hunk
in the `TEXT` section of the line. | [
"Given",
"a",
"hunk",
"line",
"in",
"git",
"diff",
"output",
"return",
"the",
"line",
"number",
"at",
"the",
"start",
"of",
"the",
"hunk",
".",
"A",
"hunk",
"is",
"a",
"segment",
"of",
"code",
"that",
"contains",
"changes",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/diff_reporter.py#L412-L455 | train | 211,955 |
Bachmann1234/diff-cover | diff_cover/diff_reporter.py | GitDiffReporter._unique_ordered_lines | def _unique_ordered_lines(line_numbers):
"""
Given a list of line numbers, return a list in which each line
number is included once and the lines are ordered sequentially.
"""
if len(line_numbers) == 0:
return []
# Ensure lines are unique by putting them in a set
line_set = set(line_numbers)
# Retrieve the list from the set, sort it, and return
return sorted([line for line in line_set]) | python | def _unique_ordered_lines(line_numbers):
"""
Given a list of line numbers, return a list in which each line
number is included once and the lines are ordered sequentially.
"""
if len(line_numbers) == 0:
return []
# Ensure lines are unique by putting them in a set
line_set = set(line_numbers)
# Retrieve the list from the set, sort it, and return
return sorted([line for line in line_set]) | [
"def",
"_unique_ordered_lines",
"(",
"line_numbers",
")",
":",
"if",
"len",
"(",
"line_numbers",
")",
"==",
"0",
":",
"return",
"[",
"]",
"# Ensure lines are unique by putting them in a set",
"line_set",
"=",
"set",
"(",
"line_numbers",
")",
"# Retrieve the list from ... | Given a list of line numbers, return a list in which each line
number is included once and the lines are ordered sequentially. | [
"Given",
"a",
"list",
"of",
"line",
"numbers",
"return",
"a",
"list",
"in",
"which",
"each",
"line",
"number",
"is",
"included",
"once",
"and",
"the",
"lines",
"are",
"ordered",
"sequentially",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/diff_reporter.py#L458-L471 | train | 211,956 |
Bachmann1234/diff-cover | diff_cover/report_generator.py | BaseReportGenerator.src_paths | def src_paths(self):
"""
Return a list of source files in the diff
for which we have coverage information.
"""
return {src for src, summary in self._diff_violations().items()
if len(summary.measured_lines) > 0} | python | def src_paths(self):
"""
Return a list of source files in the diff
for which we have coverage information.
"""
return {src for src, summary in self._diff_violations().items()
if len(summary.measured_lines) > 0} | [
"def",
"src_paths",
"(",
"self",
")",
":",
"return",
"{",
"src",
"for",
"src",
",",
"summary",
"in",
"self",
".",
"_diff_violations",
"(",
")",
".",
"items",
"(",
")",
"if",
"len",
"(",
"summary",
".",
"measured_lines",
")",
">",
"0",
"}"
] | Return a list of source files in the diff
for which we have coverage information. | [
"Return",
"a",
"list",
"of",
"source",
"files",
"in",
"the",
"diff",
"for",
"which",
"we",
"have",
"coverage",
"information",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/report_generator.py#L78-L84 | train | 211,957 |
Bachmann1234/diff-cover | diff_cover/report_generator.py | BaseReportGenerator.percent_covered | def percent_covered(self, src_path):
"""
Return a float percent of lines covered for the source
in `src_path`.
If we have no coverage information for `src_path`, returns None
"""
diff_violations = self._diff_violations().get(src_path)
if diff_violations is None:
return None
# Protect against a divide by zero
num_measured = len(diff_violations.measured_lines)
if num_measured > 0:
num_uncovered = len(diff_violations.lines)
return 100 - float(num_uncovered) / num_measured * 100
else:
return None | python | def percent_covered(self, src_path):
"""
Return a float percent of lines covered for the source
in `src_path`.
If we have no coverage information for `src_path`, returns None
"""
diff_violations = self._diff_violations().get(src_path)
if diff_violations is None:
return None
# Protect against a divide by zero
num_measured = len(diff_violations.measured_lines)
if num_measured > 0:
num_uncovered = len(diff_violations.lines)
return 100 - float(num_uncovered) / num_measured * 100
else:
return None | [
"def",
"percent_covered",
"(",
"self",
",",
"src_path",
")",
":",
"diff_violations",
"=",
"self",
".",
"_diff_violations",
"(",
")",
".",
"get",
"(",
"src_path",
")",
"if",
"diff_violations",
"is",
"None",
":",
"return",
"None",
"# Protect against a divide by ze... | Return a float percent of lines covered for the source
in `src_path`.
If we have no coverage information for `src_path`, returns None | [
"Return",
"a",
"float",
"percent",
"of",
"lines",
"covered",
"for",
"the",
"source",
"in",
"src_path",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/report_generator.py#L86-L105 | train | 211,958 |
Bachmann1234/diff-cover | diff_cover/report_generator.py | BaseReportGenerator.total_num_lines | def total_num_lines(self):
"""
Return the total number of lines in the diff for
which we have coverage info.
"""
return sum([len(summary.measured_lines) for summary
in self._diff_violations().values()]) | python | def total_num_lines(self):
"""
Return the total number of lines in the diff for
which we have coverage info.
"""
return sum([len(summary.measured_lines) for summary
in self._diff_violations().values()]) | [
"def",
"total_num_lines",
"(",
"self",
")",
":",
"return",
"sum",
"(",
"[",
"len",
"(",
"summary",
".",
"measured_lines",
")",
"for",
"summary",
"in",
"self",
".",
"_diff_violations",
"(",
")",
".",
"values",
"(",
")",
"]",
")"
] | Return the total number of lines in the diff for
which we have coverage info. | [
"Return",
"the",
"total",
"number",
"of",
"lines",
"in",
"the",
"diff",
"for",
"which",
"we",
"have",
"coverage",
"info",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/report_generator.py#L123-L130 | train | 211,959 |
Bachmann1234/diff-cover | diff_cover/report_generator.py | BaseReportGenerator.total_num_violations | def total_num_violations(self):
"""
Returns the total number of lines in the diff
that are in violation.
"""
return sum(
len(summary.lines)
for summary
in self._diff_violations().values()
) | python | def total_num_violations(self):
"""
Returns the total number of lines in the diff
that are in violation.
"""
return sum(
len(summary.lines)
for summary
in self._diff_violations().values()
) | [
"def",
"total_num_violations",
"(",
"self",
")",
":",
"return",
"sum",
"(",
"len",
"(",
"summary",
".",
"lines",
")",
"for",
"summary",
"in",
"self",
".",
"_diff_violations",
"(",
")",
".",
"values",
"(",
")",
")"
] | Returns the total number of lines in the diff
that are in violation. | [
"Returns",
"the",
"total",
"number",
"of",
"lines",
"in",
"the",
"diff",
"that",
"are",
"in",
"violation",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/report_generator.py#L132-L142 | train | 211,960 |
Bachmann1234/diff-cover | diff_cover/report_generator.py | TemplateReportGenerator.generate_report | def generate_report(self, output_file):
"""
See base class.
output_file must be a file handler that takes in bytes!
"""
if self.TEMPLATE_NAME is not None:
template = TEMPLATE_ENV.get_template(self.TEMPLATE_NAME)
report = template.render(self._context())
if isinstance(report, six.string_types):
report = report.encode('utf-8')
output_file.write(report) | python | def generate_report(self, output_file):
"""
See base class.
output_file must be a file handler that takes in bytes!
"""
if self.TEMPLATE_NAME is not None:
template = TEMPLATE_ENV.get_template(self.TEMPLATE_NAME)
report = template.render(self._context())
if isinstance(report, six.string_types):
report = report.encode('utf-8')
output_file.write(report) | [
"def",
"generate_report",
"(",
"self",
",",
"output_file",
")",
":",
"if",
"self",
".",
"TEMPLATE_NAME",
"is",
"not",
"None",
":",
"template",
"=",
"TEMPLATE_ENV",
".",
"get_template",
"(",
"self",
".",
"TEMPLATE_NAME",
")",
"report",
"=",
"template",
".",
... | See base class.
output_file must be a file handler that takes in bytes! | [
"See",
"base",
"class",
".",
"output_file",
"must",
"be",
"a",
"file",
"handler",
"that",
"takes",
"in",
"bytes!"
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/report_generator.py#L207-L220 | train | 211,961 |
Bachmann1234/diff-cover | diff_cover/report_generator.py | TemplateReportGenerator.generate_css | def generate_css(self, output_file):
"""
Generate an external style sheet file.
output_file must be a file handler that takes in bytes!
"""
if self.CSS_TEMPLATE_NAME is not None:
template = TEMPLATE_ENV.get_template(self.CSS_TEMPLATE_NAME)
style = template.render(self._context())
if isinstance(style, six.string_types):
style = style.encode('utf-8')
output_file.write(style) | python | def generate_css(self, output_file):
"""
Generate an external style sheet file.
output_file must be a file handler that takes in bytes!
"""
if self.CSS_TEMPLATE_NAME is not None:
template = TEMPLATE_ENV.get_template(self.CSS_TEMPLATE_NAME)
style = template.render(self._context())
if isinstance(style, six.string_types):
style = style.encode('utf-8')
output_file.write(style) | [
"def",
"generate_css",
"(",
"self",
",",
"output_file",
")",
":",
"if",
"self",
".",
"CSS_TEMPLATE_NAME",
"is",
"not",
"None",
":",
"template",
"=",
"TEMPLATE_ENV",
".",
"get_template",
"(",
"self",
".",
"CSS_TEMPLATE_NAME",
")",
"style",
"=",
"template",
".... | Generate an external style sheet file.
output_file must be a file handler that takes in bytes! | [
"Generate",
"an",
"external",
"style",
"sheet",
"file",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/report_generator.py#L222-L235 | train | 211,962 |
Bachmann1234/diff-cover | diff_cover/report_generator.py | TemplateReportGenerator._context | def _context(self):
"""
Return the context to pass to the template.
The context is a dict of the form:
{
'css_url': CSS_URL,
'report_name': REPORT_NAME,
'diff_name': DIFF_NAME,
'src_stats': {SRC_PATH: {
'percent_covered': PERCENT_COVERED,
'violation_lines': [LINE_NUM, ...]
}, ... }
'total_num_lines': TOTAL_NUM_LINES,
'total_num_violations': TOTAL_NUM_VIOLATIONS,
'total_percent_covered': TOTAL_PERCENT_COVERED
}
"""
# Calculate the information to pass to the template
src_stats = {
src: self._src_path_stats(src) for src in self.src_paths()
}
# Include snippet style info if we're displaying
# source code snippets
if self.INCLUDE_SNIPPETS:
snippet_style = Snippet.style_defs()
else:
snippet_style = None
return {
'css_url': self.css_url,
'report_name': self.coverage_report_name(),
'diff_name': self.diff_report_name(),
'src_stats': src_stats,
'total_num_lines': self.total_num_lines(),
'total_num_violations': self.total_num_violations(),
'total_percent_covered': self.total_percent_covered(),
'snippet_style': snippet_style
} | python | def _context(self):
"""
Return the context to pass to the template.
The context is a dict of the form:
{
'css_url': CSS_URL,
'report_name': REPORT_NAME,
'diff_name': DIFF_NAME,
'src_stats': {SRC_PATH: {
'percent_covered': PERCENT_COVERED,
'violation_lines': [LINE_NUM, ...]
}, ... }
'total_num_lines': TOTAL_NUM_LINES,
'total_num_violations': TOTAL_NUM_VIOLATIONS,
'total_percent_covered': TOTAL_PERCENT_COVERED
}
"""
# Calculate the information to pass to the template
src_stats = {
src: self._src_path_stats(src) for src in self.src_paths()
}
# Include snippet style info if we're displaying
# source code snippets
if self.INCLUDE_SNIPPETS:
snippet_style = Snippet.style_defs()
else:
snippet_style = None
return {
'css_url': self.css_url,
'report_name': self.coverage_report_name(),
'diff_name': self.diff_report_name(),
'src_stats': src_stats,
'total_num_lines': self.total_num_lines(),
'total_num_violations': self.total_num_violations(),
'total_percent_covered': self.total_percent_covered(),
'snippet_style': snippet_style
} | [
"def",
"_context",
"(",
"self",
")",
":",
"# Calculate the information to pass to the template",
"src_stats",
"=",
"{",
"src",
":",
"self",
".",
"_src_path_stats",
"(",
"src",
")",
"for",
"src",
"in",
"self",
".",
"src_paths",
"(",
")",
"}",
"# Include snippet s... | Return the context to pass to the template.
The context is a dict of the form:
{
'css_url': CSS_URL,
'report_name': REPORT_NAME,
'diff_name': DIFF_NAME,
'src_stats': {SRC_PATH: {
'percent_covered': PERCENT_COVERED,
'violation_lines': [LINE_NUM, ...]
}, ... }
'total_num_lines': TOTAL_NUM_LINES,
'total_num_violations': TOTAL_NUM_VIOLATIONS,
'total_percent_covered': TOTAL_PERCENT_COVERED
} | [
"Return",
"the",
"context",
"to",
"pass",
"to",
"the",
"template",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/report_generator.py#L237-L278 | train | 211,963 |
Bachmann1234/diff-cover | diff_cover/report_generator.py | TemplateReportGenerator.combine_adjacent_lines | def combine_adjacent_lines(line_numbers):
"""
Given a sorted collection of line numbers this will
turn them to strings and combine adjacent values
[1, 2, 5, 6, 100] -> ["1-2", "5-6", "100"]
"""
combine_template = "{0}-{1}"
combined_list = []
# Add a terminating value of `None` to list
line_numbers.append(None)
start = line_numbers[0]
end = None
for line_number in line_numbers[1:]:
# If the current number is adjacent to the previous number
if (end if end else start) + 1 == line_number:
end = line_number
else:
if end:
combined_list.append(combine_template.format(start, end))
else:
combined_list.append(str(start))
start = line_number
end = None
return combined_list | python | def combine_adjacent_lines(line_numbers):
"""
Given a sorted collection of line numbers this will
turn them to strings and combine adjacent values
[1, 2, 5, 6, 100] -> ["1-2", "5-6", "100"]
"""
combine_template = "{0}-{1}"
combined_list = []
# Add a terminating value of `None` to list
line_numbers.append(None)
start = line_numbers[0]
end = None
for line_number in line_numbers[1:]:
# If the current number is adjacent to the previous number
if (end if end else start) + 1 == line_number:
end = line_number
else:
if end:
combined_list.append(combine_template.format(start, end))
else:
combined_list.append(str(start))
start = line_number
end = None
return combined_list | [
"def",
"combine_adjacent_lines",
"(",
"line_numbers",
")",
":",
"combine_template",
"=",
"\"{0}-{1}\"",
"combined_list",
"=",
"[",
"]",
"# Add a terminating value of `None` to list",
"line_numbers",
".",
"append",
"(",
"None",
")",
"start",
"=",
"line_numbers",
"[",
"... | Given a sorted collection of line numbers this will
turn them to strings and combine adjacent values
[1, 2, 5, 6, 100] -> ["1-2", "5-6", "100"] | [
"Given",
"a",
"sorted",
"collection",
"of",
"line",
"numbers",
"this",
"will",
"turn",
"them",
"to",
"strings",
"and",
"combine",
"adjacent",
"values"
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/report_generator.py#L281-L307 | train | 211,964 |
Bachmann1234/diff-cover | diff_cover/report_generator.py | TemplateReportGenerator._src_path_stats | def _src_path_stats(self, src_path):
"""
Return a dict of statistics for the source file at `src_path`.
"""
# Find violation lines
violation_lines = self.violation_lines(src_path)
violations = sorted(self._diff_violations()[src_path].violations)
# Load source snippets (if the report will display them)
# If we cannot load the file, then fail gracefully
if self.INCLUDE_SNIPPETS:
try:
snippets = Snippet.load_snippets_html(src_path, violation_lines)
except IOError:
snippets = []
else:
snippets = []
return {
'percent_covered': self.percent_covered(src_path),
'violation_lines': TemplateReportGenerator.combine_adjacent_lines(violation_lines),
'violations': violations,
'snippets_html': snippets
} | python | def _src_path_stats(self, src_path):
"""
Return a dict of statistics for the source file at `src_path`.
"""
# Find violation lines
violation_lines = self.violation_lines(src_path)
violations = sorted(self._diff_violations()[src_path].violations)
# Load source snippets (if the report will display them)
# If we cannot load the file, then fail gracefully
if self.INCLUDE_SNIPPETS:
try:
snippets = Snippet.load_snippets_html(src_path, violation_lines)
except IOError:
snippets = []
else:
snippets = []
return {
'percent_covered': self.percent_covered(src_path),
'violation_lines': TemplateReportGenerator.combine_adjacent_lines(violation_lines),
'violations': violations,
'snippets_html': snippets
} | [
"def",
"_src_path_stats",
"(",
"self",
",",
"src_path",
")",
":",
"# Find violation lines",
"violation_lines",
"=",
"self",
".",
"violation_lines",
"(",
"src_path",
")",
"violations",
"=",
"sorted",
"(",
"self",
".",
"_diff_violations",
"(",
")",
"[",
"src_path"... | Return a dict of statistics for the source file at `src_path`. | [
"Return",
"a",
"dict",
"of",
"statistics",
"for",
"the",
"source",
"file",
"at",
"src_path",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/report_generator.py#L309-L333 | train | 211,965 |
Bachmann1234/diff-cover | diff_cover/command_runner.py | run_command_for_code | def run_command_for_code(command):
"""
Returns command's exit code.
"""
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
process.communicate()
exit_code = process.returncode
return exit_code | python | def run_command_for_code(command):
"""
Returns command's exit code.
"""
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
process.communicate()
exit_code = process.returncode
return exit_code | [
"def",
"run_command_for_code",
"(",
"command",
")",
":",
"process",
"=",
"subprocess",
".",
"Popen",
"(",
"command",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
")",
"process",
".",
"communicate",
"(",
")",... | Returns command's exit code. | [
"Returns",
"command",
"s",
"exit",
"code",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/command_runner.py#L48-L57 | train | 211,966 |
Bachmann1234/diff-cover | diff_cover/snippets.py | Snippet.style_defs | def style_defs(cls):
"""
Return the CSS style definitions required
by the formatted snippet.
"""
formatter = HtmlFormatter()
formatter.style.highlight_color = cls.VIOLATION_COLOR
return formatter.get_style_defs() | python | def style_defs(cls):
"""
Return the CSS style definitions required
by the formatted snippet.
"""
formatter = HtmlFormatter()
formatter.style.highlight_color = cls.VIOLATION_COLOR
return formatter.get_style_defs() | [
"def",
"style_defs",
"(",
"cls",
")",
":",
"formatter",
"=",
"HtmlFormatter",
"(",
")",
"formatter",
".",
"style",
".",
"highlight_color",
"=",
"cls",
".",
"VIOLATION_COLOR",
"return",
"formatter",
".",
"get_style_defs",
"(",
")"
] | Return the CSS style definitions required
by the formatted snippet. | [
"Return",
"the",
"CSS",
"style",
"definitions",
"required",
"by",
"the",
"formatted",
"snippet",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/snippets.py#L81-L88 | train | 211,967 |
Bachmann1234/diff-cover | diff_cover/snippets.py | Snippet.html | def html(self):
"""
Return an HTML representation of the snippet.
"""
formatter = HtmlFormatter(
cssclass=self.DIV_CSS_CLASS,
linenos=True,
linenostart=self._start_line,
hl_lines=self._shift_lines(
self._violation_lines,
self._start_line
),
lineanchors=self._src_filename
)
return pygments.format(self.src_tokens(), formatter) | python | def html(self):
"""
Return an HTML representation of the snippet.
"""
formatter = HtmlFormatter(
cssclass=self.DIV_CSS_CLASS,
linenos=True,
linenostart=self._start_line,
hl_lines=self._shift_lines(
self._violation_lines,
self._start_line
),
lineanchors=self._src_filename
)
return pygments.format(self.src_tokens(), formatter) | [
"def",
"html",
"(",
"self",
")",
":",
"formatter",
"=",
"HtmlFormatter",
"(",
"cssclass",
"=",
"self",
".",
"DIV_CSS_CLASS",
",",
"linenos",
"=",
"True",
",",
"linenostart",
"=",
"self",
".",
"_start_line",
",",
"hl_lines",
"=",
"self",
".",
"_shift_lines"... | Return an HTML representation of the snippet. | [
"Return",
"an",
"HTML",
"representation",
"of",
"the",
"snippet",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/snippets.py#L90-L105 | train | 211,968 |
Bachmann1234/diff-cover | diff_cover/snippets.py | Snippet.load_snippets_html | def load_snippets_html(cls, src_path, violation_lines):
"""
Load snippets from the file at `src_path` and format
them as HTML.
See `load_snippets()` for details.
"""
snippet_list = cls.load_snippets(src_path, violation_lines)
return [snippet.html() for snippet in snippet_list] | python | def load_snippets_html(cls, src_path, violation_lines):
"""
Load snippets from the file at `src_path` and format
them as HTML.
See `load_snippets()` for details.
"""
snippet_list = cls.load_snippets(src_path, violation_lines)
return [snippet.html() for snippet in snippet_list] | [
"def",
"load_snippets_html",
"(",
"cls",
",",
"src_path",
",",
"violation_lines",
")",
":",
"snippet_list",
"=",
"cls",
".",
"load_snippets",
"(",
"src_path",
",",
"violation_lines",
")",
"return",
"[",
"snippet",
".",
"html",
"(",
")",
"for",
"snippet",
"in... | Load snippets from the file at `src_path` and format
them as HTML.
See `load_snippets()` for details. | [
"Load",
"snippets",
"from",
"the",
"file",
"at",
"src_path",
"and",
"format",
"them",
"as",
"HTML",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/snippets.py#L130-L138 | train | 211,969 |
Bachmann1234/diff-cover | diff_cover/snippets.py | Snippet._group_tokens | def _group_tokens(cls, token_stream, range_list):
"""
Group tokens into snippet ranges.
`token_stream` is a generator that produces
`(token_type, value)` tuples,
`range_list` is a list of `(start, end)` tuples representing
the (inclusive) range of line numbers for each snippet.
Assumes that `range_list` is an ascending order by start value.
Returns a dict mapping ranges to lists of tokens:
{
(4, 10): [(ttype_1, val_1), (ttype_2, val_2), ...],
(29, 39): [(ttype_3, val_3), ...],
...
}
The algorithm is slightly complicated because a single token
can contain multiple line breaks.
"""
# Create a map from ranges (start/end tuples) to tokens
token_map = {rng: [] for rng in range_list}
# Keep track of the current line number; we will
# increment this as we encounter newlines in token values
line_num = 1
for ttype, val in token_stream:
# If there are newlines in this token,
# we need to split it up and check whether
# each line within the token is within one
# of our ranges.
if '\n' in val:
val_lines = val.split('\n')
# Check if the tokens match each range
for (start, end), filtered_tokens in six.iteritems(token_map):
# Filter out lines that are not in this range
include_vals = [
val_lines[i] for i in
range(0, len(val_lines))
if i + line_num in range(start, end + 1)
]
# If we found any lines, store the tokens
if len(include_vals) > 0:
token = (ttype, '\n'.join(include_vals))
filtered_tokens.append(token)
# Increment the line number
# by the number of lines we found
line_num += len(val_lines) - 1
# No newline in this token
# If we're in the line range, add it
else:
# Check if the tokens match each range
for (start, end), filtered_tokens in six.iteritems(token_map):
# If we got a match, store the token
if line_num in range(start, end + 1):
filtered_tokens.append((ttype, val))
# Otherwise, ignore the token
return token_map | python | def _group_tokens(cls, token_stream, range_list):
"""
Group tokens into snippet ranges.
`token_stream` is a generator that produces
`(token_type, value)` tuples,
`range_list` is a list of `(start, end)` tuples representing
the (inclusive) range of line numbers for each snippet.
Assumes that `range_list` is an ascending order by start value.
Returns a dict mapping ranges to lists of tokens:
{
(4, 10): [(ttype_1, val_1), (ttype_2, val_2), ...],
(29, 39): [(ttype_3, val_3), ...],
...
}
The algorithm is slightly complicated because a single token
can contain multiple line breaks.
"""
# Create a map from ranges (start/end tuples) to tokens
token_map = {rng: [] for rng in range_list}
# Keep track of the current line number; we will
# increment this as we encounter newlines in token values
line_num = 1
for ttype, val in token_stream:
# If there are newlines in this token,
# we need to split it up and check whether
# each line within the token is within one
# of our ranges.
if '\n' in val:
val_lines = val.split('\n')
# Check if the tokens match each range
for (start, end), filtered_tokens in six.iteritems(token_map):
# Filter out lines that are not in this range
include_vals = [
val_lines[i] for i in
range(0, len(val_lines))
if i + line_num in range(start, end + 1)
]
# If we found any lines, store the tokens
if len(include_vals) > 0:
token = (ttype, '\n'.join(include_vals))
filtered_tokens.append(token)
# Increment the line number
# by the number of lines we found
line_num += len(val_lines) - 1
# No newline in this token
# If we're in the line range, add it
else:
# Check if the tokens match each range
for (start, end), filtered_tokens in six.iteritems(token_map):
# If we got a match, store the token
if line_num in range(start, end + 1):
filtered_tokens.append((ttype, val))
# Otherwise, ignore the token
return token_map | [
"def",
"_group_tokens",
"(",
"cls",
",",
"token_stream",
",",
"range_list",
")",
":",
"# Create a map from ranges (start/end tuples) to tokens",
"token_map",
"=",
"{",
"rng",
":",
"[",
"]",
"for",
"rng",
"in",
"range_list",
"}",
"# Keep track of the current line number;... | Group tokens into snippet ranges.
`token_stream` is a generator that produces
`(token_type, value)` tuples,
`range_list` is a list of `(start, end)` tuples representing
the (inclusive) range of line numbers for each snippet.
Assumes that `range_list` is an ascending order by start value.
Returns a dict mapping ranges to lists of tokens:
{
(4, 10): [(ttype_1, val_1), (ttype_2, val_2), ...],
(29, 39): [(ttype_3, val_3), ...],
...
}
The algorithm is slightly complicated because a single token
can contain multiple line breaks. | [
"Group",
"tokens",
"into",
"snippet",
"ranges",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/snippets.py#L199-L269 | train | 211,970 |
Bachmann1234/diff-cover | diff_cover/violationsreporters/base.py | QualityReporter.violations | def violations(self, src_path):
"""
Return a list of Violations recorded in `src_path`.
"""
if not any(src_path.endswith(ext) for ext in self.driver.supported_extensions):
return []
if src_path not in self.violations_dict:
if self.reports:
self.violations_dict = self.driver.parse_reports(self.reports)
else:
if self.driver_tool_installed is None:
self.driver_tool_installed = self.driver.installed()
if not self.driver_tool_installed:
raise EnvironmentError("{} is not installed".format(self.driver.name))
command = copy.deepcopy(self.driver.command)
if self.options:
command.append(self.options)
if os.path.exists(src_path):
command.append(src_path.encode(sys.getfilesystemencoding()))
output, _ = execute(command, self.driver.exit_codes)
self.violations_dict.update(self.driver.parse_reports([output]))
return self.violations_dict[src_path] | python | def violations(self, src_path):
"""
Return a list of Violations recorded in `src_path`.
"""
if not any(src_path.endswith(ext) for ext in self.driver.supported_extensions):
return []
if src_path not in self.violations_dict:
if self.reports:
self.violations_dict = self.driver.parse_reports(self.reports)
else:
if self.driver_tool_installed is None:
self.driver_tool_installed = self.driver.installed()
if not self.driver_tool_installed:
raise EnvironmentError("{} is not installed".format(self.driver.name))
command = copy.deepcopy(self.driver.command)
if self.options:
command.append(self.options)
if os.path.exists(src_path):
command.append(src_path.encode(sys.getfilesystemencoding()))
output, _ = execute(command, self.driver.exit_codes)
self.violations_dict.update(self.driver.parse_reports([output]))
return self.violations_dict[src_path] | [
"def",
"violations",
"(",
"self",
",",
"src_path",
")",
":",
"if",
"not",
"any",
"(",
"src_path",
".",
"endswith",
"(",
"ext",
")",
"for",
"ext",
"in",
"self",
".",
"driver",
".",
"supported_extensions",
")",
":",
"return",
"[",
"]",
"if",
"src_path",
... | Return a list of Violations recorded in `src_path`. | [
"Return",
"a",
"list",
"of",
"Violations",
"recorded",
"in",
"src_path",
"."
] | 901cb3fc986982961785e841658085ead453c6c9 | https://github.com/Bachmann1234/diff-cover/blob/901cb3fc986982961785e841658085ead453c6c9/diff_cover/violationsreporters/base.py#L139-L161 | train | 211,971 |
ThreatResponse/margaritashotgun | margaritashotgun/ssh_tunnel.py | SSHTunnel.configure | def configure(self, transport, auth, address, port):
"""
Connect paramiko transport
:type auth: :py:class`margaritashotgun.auth.AuthMethods`
:param auth: authentication object
:type address: str
:param address: remote server ip or hostname
:type port: int
:param port: remote server port
:type hostkey: :py:class:`paramiko.key.HostKey`
:param hostkey: remote host ssh server key
"""
self.transport = transport
self.username = auth.username
self.address = address
self.port = port | python | def configure(self, transport, auth, address, port):
"""
Connect paramiko transport
:type auth: :py:class`margaritashotgun.auth.AuthMethods`
:param auth: authentication object
:type address: str
:param address: remote server ip or hostname
:type port: int
:param port: remote server port
:type hostkey: :py:class:`paramiko.key.HostKey`
:param hostkey: remote host ssh server key
"""
self.transport = transport
self.username = auth.username
self.address = address
self.port = port | [
"def",
"configure",
"(",
"self",
",",
"transport",
",",
"auth",
",",
"address",
",",
"port",
")",
":",
"self",
".",
"transport",
"=",
"transport",
"self",
".",
"username",
"=",
"auth",
".",
"username",
"self",
".",
"address",
"=",
"address",
"self",
".... | Connect paramiko transport
:type auth: :py:class`margaritashotgun.auth.AuthMethods`
:param auth: authentication object
:type address: str
:param address: remote server ip or hostname
:type port: int
:param port: remote server port
:type hostkey: :py:class:`paramiko.key.HostKey`
:param hostkey: remote host ssh server key | [
"Connect",
"paramiko",
"transport"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/ssh_tunnel.py#L29-L46 | train | 211,972 |
ThreatResponse/margaritashotgun | margaritashotgun/ssh_tunnel.py | SSHTunnel.start | def start(self, local_port, remote_address, remote_port):
"""
Start ssh tunnel
type: local_port: int
param: local_port: local tunnel endpoint ip binding
type: remote_address: str
param: remote_address: Remote tunnel endpoing ip binding
type: remote_port: int
param: remote_port: Remote tunnel endpoint port binding
"""
self.local_port = local_port
self.remote_address = remote_address
self.remote_port = remote_port
logger.debug(("Starting ssh tunnel {0}:{1}:{2} for "
"{3}@{4}".format(local_port, remote_address, remote_port,
self.username, self.address)))
self.forward = Forward(local_port,
remote_address,
remote_port,
self.transport)
self.forward.start() | python | def start(self, local_port, remote_address, remote_port):
"""
Start ssh tunnel
type: local_port: int
param: local_port: local tunnel endpoint ip binding
type: remote_address: str
param: remote_address: Remote tunnel endpoing ip binding
type: remote_port: int
param: remote_port: Remote tunnel endpoint port binding
"""
self.local_port = local_port
self.remote_address = remote_address
self.remote_port = remote_port
logger.debug(("Starting ssh tunnel {0}:{1}:{2} for "
"{3}@{4}".format(local_port, remote_address, remote_port,
self.username, self.address)))
self.forward = Forward(local_port,
remote_address,
remote_port,
self.transport)
self.forward.start() | [
"def",
"start",
"(",
"self",
",",
"local_port",
",",
"remote_address",
",",
"remote_port",
")",
":",
"self",
".",
"local_port",
"=",
"local_port",
"self",
".",
"remote_address",
"=",
"remote_address",
"self",
".",
"remote_port",
"=",
"remote_port",
"logger",
"... | Start ssh tunnel
type: local_port: int
param: local_port: local tunnel endpoint ip binding
type: remote_address: str
param: remote_address: Remote tunnel endpoing ip binding
type: remote_port: int
param: remote_port: Remote tunnel endpoint port binding | [
"Start",
"ssh",
"tunnel"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/ssh_tunnel.py#L48-L70 | train | 211,973 |
ThreatResponse/margaritashotgun | margaritashotgun/ssh_tunnel.py | SSHTunnel.cleanup | def cleanup(self):
"""
Cleanup resources used during execution
"""
if self.local_port is not None:
logger.debug(("Stopping ssh tunnel {0}:{1}:{2} for "
"{3}@{4}".format(self.local_port,
self.remote_address,
self.remote_port,
self.username,
self.address)))
if self.forward is not None:
self.forward.stop()
self.forward.join()
if self.transport is not None:
self.transport.close() | python | def cleanup(self):
"""
Cleanup resources used during execution
"""
if self.local_port is not None:
logger.debug(("Stopping ssh tunnel {0}:{1}:{2} for "
"{3}@{4}".format(self.local_port,
self.remote_address,
self.remote_port,
self.username,
self.address)))
if self.forward is not None:
self.forward.stop()
self.forward.join()
if self.transport is not None:
self.transport.close() | [
"def",
"cleanup",
"(",
"self",
")",
":",
"if",
"self",
".",
"local_port",
"is",
"not",
"None",
":",
"logger",
".",
"debug",
"(",
"(",
"\"Stopping ssh tunnel {0}:{1}:{2} for \"",
"\"{3}@{4}\"",
".",
"format",
"(",
"self",
".",
"local_port",
",",
"self",
".",
... | Cleanup resources used during execution | [
"Cleanup",
"resources",
"used",
"during",
"execution"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/ssh_tunnel.py#L72-L87 | train | 211,974 |
ThreatResponse/margaritashotgun | margaritashotgun/__init__.py | set_stream_logger | def set_stream_logger(name='margaritashotgun', level=logging.INFO,
format_string=None):
"""
Add a stream handler for the provided name and level to the logging module.
>>> import margaritashotgun
>>> margaritashotgun.set_stream_logger('marsho', logging.DEBUG)
:type name: string
:param name: Log name
:type level: int
:param level: Logging level
:type format_string: str
:param format_string: Log message format
"""
if format_string is None:
format_string = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
time_format = "%Y-%m-%dT%H:%M:%S"
logger = logging.getLogger(name)
logger.setLevel(level)
handler = logging.StreamHandler()
handler.setLevel(level)
formatter = logging.Formatter(format_string, time_format)
handler.setFormatter(formatter)
logger.addHandler(handler)
paramiko_log_level = logging.CRITICAL
paramiko_log = logging.getLogger('paramiko')
paramiko_log.setLevel(paramiko_log_level)
paramiko_handler = logging.StreamHandler()
paramiko_handler.setLevel(paramiko_log_level)
paramiko_handler.setFormatter(formatter)
paramiko_log.addHandler(paramiko_handler) | python | def set_stream_logger(name='margaritashotgun', level=logging.INFO,
format_string=None):
"""
Add a stream handler for the provided name and level to the logging module.
>>> import margaritashotgun
>>> margaritashotgun.set_stream_logger('marsho', logging.DEBUG)
:type name: string
:param name: Log name
:type level: int
:param level: Logging level
:type format_string: str
:param format_string: Log message format
"""
if format_string is None:
format_string = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
time_format = "%Y-%m-%dT%H:%M:%S"
logger = logging.getLogger(name)
logger.setLevel(level)
handler = logging.StreamHandler()
handler.setLevel(level)
formatter = logging.Formatter(format_string, time_format)
handler.setFormatter(formatter)
logger.addHandler(handler)
paramiko_log_level = logging.CRITICAL
paramiko_log = logging.getLogger('paramiko')
paramiko_log.setLevel(paramiko_log_level)
paramiko_handler = logging.StreamHandler()
paramiko_handler.setLevel(paramiko_log_level)
paramiko_handler.setFormatter(formatter)
paramiko_log.addHandler(paramiko_handler) | [
"def",
"set_stream_logger",
"(",
"name",
"=",
"'margaritashotgun'",
",",
"level",
"=",
"logging",
".",
"INFO",
",",
"format_string",
"=",
"None",
")",
":",
"if",
"format_string",
"is",
"None",
":",
"format_string",
"=",
"\"%(asctime)s - %(name)s - %(levelname)s - %(... | Add a stream handler for the provided name and level to the logging module.
>>> import margaritashotgun
>>> margaritashotgun.set_stream_logger('marsho', logging.DEBUG)
:type name: string
:param name: Log name
:type level: int
:param level: Logging level
:type format_string: str
:param format_string: Log message format | [
"Add",
"a",
"stream",
"handler",
"for",
"the",
"provided",
"name",
"and",
"level",
"to",
"the",
"logging",
"module",
"."
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/__init__.py#L8-L42 | train | 211,975 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_host.py | Host.connect | def connect(self, username, password, key, address, port, jump_host):
"""
Connect ssh tunnel and shell executor to remote host
:type username: str
:param username: username for authentication
:type password: str
:param password: password for authentication, may be used to unlock rsa key
:type key: str
:param key: path to rsa key for authentication
:type address: str
:param address: address for remote host
:type port: int
:param port: ssh port for remote host
"""
if port is None:
self.remote_port = 22
else:
self.remote_port = int(port)
auth = Auth(username=username, password=password, key=key)
if jump_host is not None:
jump_auth = Auth(username=jump_host['username'],
password=jump_host['password'],
key=jump_host['key'])
if jump_host['port'] is None:
jump_host['port'] = 22
else:
jump_auth = None
self.shell.connect(auth, address, self.remote_port, jump_host, jump_auth)
transport = self.shell.transport()
self.tunnel.configure(transport, auth, address, self.remote_port)
self.remote_addr = address | python | def connect(self, username, password, key, address, port, jump_host):
"""
Connect ssh tunnel and shell executor to remote host
:type username: str
:param username: username for authentication
:type password: str
:param password: password for authentication, may be used to unlock rsa key
:type key: str
:param key: path to rsa key for authentication
:type address: str
:param address: address for remote host
:type port: int
:param port: ssh port for remote host
"""
if port is None:
self.remote_port = 22
else:
self.remote_port = int(port)
auth = Auth(username=username, password=password, key=key)
if jump_host is not None:
jump_auth = Auth(username=jump_host['username'],
password=jump_host['password'],
key=jump_host['key'])
if jump_host['port'] is None:
jump_host['port'] = 22
else:
jump_auth = None
self.shell.connect(auth, address, self.remote_port, jump_host, jump_auth)
transport = self.shell.transport()
self.tunnel.configure(transport, auth, address, self.remote_port)
self.remote_addr = address | [
"def",
"connect",
"(",
"self",
",",
"username",
",",
"password",
",",
"key",
",",
"address",
",",
"port",
",",
"jump_host",
")",
":",
"if",
"port",
"is",
"None",
":",
"self",
".",
"remote_port",
"=",
"22",
"else",
":",
"self",
".",
"remote_port",
"="... | Connect ssh tunnel and shell executor to remote host
:type username: str
:param username: username for authentication
:type password: str
:param password: password for authentication, may be used to unlock rsa key
:type key: str
:param key: path to rsa key for authentication
:type address: str
:param address: address for remote host
:type port: int
:param port: ssh port for remote host | [
"Connect",
"ssh",
"tunnel",
"and",
"shell",
"executor",
"to",
"remote",
"host"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_host.py#L126-L158 | train | 211,976 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_host.py | Host.start_tunnel | def start_tunnel(self, local_port, remote_address, remote_port):
"""
Start ssh forward tunnel
:type local_port: int
:param local_port: local port binding for ssh tunnel
:type remote_address: str
:param remote_address: remote tunnel endpoint bind address
:type remote_port: int
:param remote_port: remote tunnel endpoint bind port
"""
self.tunnel.start(local_port, remote_address, remote_port)
self.tunnel_port = local_port | python | def start_tunnel(self, local_port, remote_address, remote_port):
"""
Start ssh forward tunnel
:type local_port: int
:param local_port: local port binding for ssh tunnel
:type remote_address: str
:param remote_address: remote tunnel endpoint bind address
:type remote_port: int
:param remote_port: remote tunnel endpoint bind port
"""
self.tunnel.start(local_port, remote_address, remote_port)
self.tunnel_port = local_port | [
"def",
"start_tunnel",
"(",
"self",
",",
"local_port",
",",
"remote_address",
",",
"remote_port",
")",
":",
"self",
".",
"tunnel",
".",
"start",
"(",
"local_port",
",",
"remote_address",
",",
"remote_port",
")",
"self",
".",
"tunnel_port",
"=",
"local_port"
] | Start ssh forward tunnel
:type local_port: int
:param local_port: local port binding for ssh tunnel
:type remote_address: str
:param remote_address: remote tunnel endpoint bind address
:type remote_port: int
:param remote_port: remote tunnel endpoint bind port | [
"Start",
"ssh",
"forward",
"tunnel"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_host.py#L160-L172 | train | 211,977 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_host.py | Host.mem_size | def mem_size(self):
"""
Returns the memory size in bytes of the remote host
"""
result = self.shell.execute(self.commands.mem_size.value)
stdout = self.shell.decode(result['stdout'])
stderr = self.shell.decode(result['stderr'])
return int(stdout) | python | def mem_size(self):
"""
Returns the memory size in bytes of the remote host
"""
result = self.shell.execute(self.commands.mem_size.value)
stdout = self.shell.decode(result['stdout'])
stderr = self.shell.decode(result['stderr'])
return int(stdout) | [
"def",
"mem_size",
"(",
"self",
")",
":",
"result",
"=",
"self",
".",
"shell",
".",
"execute",
"(",
"self",
".",
"commands",
".",
"mem_size",
".",
"value",
")",
"stdout",
"=",
"self",
".",
"shell",
".",
"decode",
"(",
"result",
"[",
"'stdout'",
"]",
... | Returns the memory size in bytes of the remote host | [
"Returns",
"the",
"memory",
"size",
"in",
"bytes",
"of",
"the",
"remote",
"host"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_host.py#L174-L181 | train | 211,978 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_host.py | Host.kernel_version | def kernel_version(self):
"""
Returns the kernel kernel version of the remote host
"""
result = self.shell.execute(self.commands.kernel_version.value)
stdout = self.shell.decode(result['stdout'])
stderr = self.shell.decode(result['stderr'])
return stdout | python | def kernel_version(self):
"""
Returns the kernel kernel version of the remote host
"""
result = self.shell.execute(self.commands.kernel_version.value)
stdout = self.shell.decode(result['stdout'])
stderr = self.shell.decode(result['stderr'])
return stdout | [
"def",
"kernel_version",
"(",
"self",
")",
":",
"result",
"=",
"self",
".",
"shell",
".",
"execute",
"(",
"self",
".",
"commands",
".",
"kernel_version",
".",
"value",
")",
"stdout",
"=",
"self",
".",
"shell",
".",
"decode",
"(",
"result",
"[",
"'stdou... | Returns the kernel kernel version of the remote host | [
"Returns",
"the",
"kernel",
"kernel",
"version",
"of",
"the",
"remote",
"host"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_host.py#L183-L190 | train | 211,979 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_host.py | Host.wait_for_lime | def wait_for_lime(self, listen_port, listen_address="0.0.0.0",
max_tries=20, wait=1):
"""
Wait for lime to load unless max_retries is exceeded
:type listen_port: int
:param listen_port: port LiME is listening for connections on
:type listen_address: str
:param listen_address: address LiME is listening for connections on
:type max_tries: int
:param max_tries: maximum number of checks that LiME has loaded
:type wait: int
:param wait: time to wait between checks
"""
tries = 0
pattern = self.commands.lime_pattern.value.format(listen_address,
listen_port)
lime_loaded = False
while tries < max_tries and lime_loaded is False:
lime_loaded = self.check_for_lime(pattern)
tries = tries + 1
time.sleep(wait)
return lime_loaded | python | def wait_for_lime(self, listen_port, listen_address="0.0.0.0",
max_tries=20, wait=1):
"""
Wait for lime to load unless max_retries is exceeded
:type listen_port: int
:param listen_port: port LiME is listening for connections on
:type listen_address: str
:param listen_address: address LiME is listening for connections on
:type max_tries: int
:param max_tries: maximum number of checks that LiME has loaded
:type wait: int
:param wait: time to wait between checks
"""
tries = 0
pattern = self.commands.lime_pattern.value.format(listen_address,
listen_port)
lime_loaded = False
while tries < max_tries and lime_loaded is False:
lime_loaded = self.check_for_lime(pattern)
tries = tries + 1
time.sleep(wait)
return lime_loaded | [
"def",
"wait_for_lime",
"(",
"self",
",",
"listen_port",
",",
"listen_address",
"=",
"\"0.0.0.0\"",
",",
"max_tries",
"=",
"20",
",",
"wait",
"=",
"1",
")",
":",
"tries",
"=",
"0",
"pattern",
"=",
"self",
".",
"commands",
".",
"lime_pattern",
".",
"value... | Wait for lime to load unless max_retries is exceeded
:type listen_port: int
:param listen_port: port LiME is listening for connections on
:type listen_address: str
:param listen_address: address LiME is listening for connections on
:type max_tries: int
:param max_tries: maximum number of checks that LiME has loaded
:type wait: int
:param wait: time to wait between checks | [
"Wait",
"for",
"lime",
"to",
"load",
"unless",
"max_retries",
"is",
"exceeded"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_host.py#L192-L214 | train | 211,980 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_host.py | Host.check_for_lime | def check_for_lime(self, pattern):
"""
Check to see if LiME has loaded on the remote system
:type pattern: str
:param pattern: pattern to check output against
:type listen_port: int
:param listen_port: port LiME is listening for connections on
"""
check = self.commands.lime_check.value
lime_loaded = False
result = self.shell.execute(check)
stdout = self.shell.decode(result['stdout'])
connections = self.net_parser.parse(stdout)
for conn in connections:
local_addr, remote_addr = conn
if local_addr == pattern:
lime_loaded = True
break
return lime_loaded | python | def check_for_lime(self, pattern):
"""
Check to see if LiME has loaded on the remote system
:type pattern: str
:param pattern: pattern to check output against
:type listen_port: int
:param listen_port: port LiME is listening for connections on
"""
check = self.commands.lime_check.value
lime_loaded = False
result = self.shell.execute(check)
stdout = self.shell.decode(result['stdout'])
connections = self.net_parser.parse(stdout)
for conn in connections:
local_addr, remote_addr = conn
if local_addr == pattern:
lime_loaded = True
break
return lime_loaded | [
"def",
"check_for_lime",
"(",
"self",
",",
"pattern",
")",
":",
"check",
"=",
"self",
".",
"commands",
".",
"lime_check",
".",
"value",
"lime_loaded",
"=",
"False",
"result",
"=",
"self",
".",
"shell",
".",
"execute",
"(",
"check",
")",
"stdout",
"=",
... | Check to see if LiME has loaded on the remote system
:type pattern: str
:param pattern: pattern to check output against
:type listen_port: int
:param listen_port: port LiME is listening for connections on | [
"Check",
"to",
"see",
"if",
"LiME",
"has",
"loaded",
"on",
"the",
"remote",
"system"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_host.py#L216-L237 | train | 211,981 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_host.py | Host.upload_module | def upload_module(self, local_path=None, remote_path="/tmp/lime.ko"):
"""
Upload LiME kernel module to remote host
:type local_path: str
:param local_path: local path to lime kernel module
:type remote_path: str
:param remote_path: remote path to upload lime kernel module
"""
if local_path is None:
raise FileNotFoundFoundError(local_path)
self.shell.upload_file(local_path, remote_path) | python | def upload_module(self, local_path=None, remote_path="/tmp/lime.ko"):
"""
Upload LiME kernel module to remote host
:type local_path: str
:param local_path: local path to lime kernel module
:type remote_path: str
:param remote_path: remote path to upload lime kernel module
"""
if local_path is None:
raise FileNotFoundFoundError(local_path)
self.shell.upload_file(local_path, remote_path) | [
"def",
"upload_module",
"(",
"self",
",",
"local_path",
"=",
"None",
",",
"remote_path",
"=",
"\"/tmp/lime.ko\"",
")",
":",
"if",
"local_path",
"is",
"None",
":",
"raise",
"FileNotFoundFoundError",
"(",
"local_path",
")",
"self",
".",
"shell",
".",
"upload_fil... | Upload LiME kernel module to remote host
:type local_path: str
:param local_path: local path to lime kernel module
:type remote_path: str
:param remote_path: remote path to upload lime kernel module | [
"Upload",
"LiME",
"kernel",
"module",
"to",
"remote",
"host"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_host.py#L239-L250 | train | 211,982 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_host.py | Host.load_lime | def load_lime(self, remote_path, listen_port, dump_format='lime'):
"""
Load LiME kernel module from remote filesystem
:type remote_path: str
:param remote_path: path to LiME kernel module on remote host
:type listen_port: int
:param listen_port: port LiME uses to listen to remote connections
:type dump_format: str
:param dump_format: LiME memory dump file format
"""
load_command = self.commands.load_lime.value.format(remote_path,
listen_port,
dump_format)
self.shell.execute_async(load_command) | python | def load_lime(self, remote_path, listen_port, dump_format='lime'):
"""
Load LiME kernel module from remote filesystem
:type remote_path: str
:param remote_path: path to LiME kernel module on remote host
:type listen_port: int
:param listen_port: port LiME uses to listen to remote connections
:type dump_format: str
:param dump_format: LiME memory dump file format
"""
load_command = self.commands.load_lime.value.format(remote_path,
listen_port,
dump_format)
self.shell.execute_async(load_command) | [
"def",
"load_lime",
"(",
"self",
",",
"remote_path",
",",
"listen_port",
",",
"dump_format",
"=",
"'lime'",
")",
":",
"load_command",
"=",
"self",
".",
"commands",
".",
"load_lime",
".",
"value",
".",
"format",
"(",
"remote_path",
",",
"listen_port",
",",
... | Load LiME kernel module from remote filesystem
:type remote_path: str
:param remote_path: path to LiME kernel module on remote host
:type listen_port: int
:param listen_port: port LiME uses to listen to remote connections
:type dump_format: str
:param dump_format: LiME memory dump file format | [
"Load",
"LiME",
"kernel",
"module",
"from",
"remote",
"filesystem"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_host.py#L252-L266 | train | 211,983 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_host.py | Host.cleanup | def cleanup(self):
"""
Release resources used by supporting classes
"""
try:
self.unload_lime()
except AttributeError as ex:
pass
self.tunnel.cleanup()
self.shell.cleanup() | python | def cleanup(self):
"""
Release resources used by supporting classes
"""
try:
self.unload_lime()
except AttributeError as ex:
pass
self.tunnel.cleanup()
self.shell.cleanup() | [
"def",
"cleanup",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"unload_lime",
"(",
")",
"except",
"AttributeError",
"as",
"ex",
":",
"pass",
"self",
".",
"tunnel",
".",
"cleanup",
"(",
")",
"self",
".",
"shell",
".",
"cleanup",
"(",
")"
] | Release resources used by supporting classes | [
"Release",
"resources",
"used",
"by",
"supporting",
"classes"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_host.py#L294-L303 | train | 211,984 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_shell.py | RemoteShell.connect | def connect(self, auth, address, port, jump_host, jump_auth):
"""
Creates an ssh session to a remote host
:type auth: :py:class:`margaritashotgun.auth.AuthMethods`
:param auth: Authentication object
:type address: str
:param address: remote server address
:type port: int
:param port: remote server port
"""
try:
self.target_address = address
sock = None
if jump_host is not None:
self.jump_host_ssh = paramiko.SSHClient()
self.jump_host_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.connect_with_auth(self.jump_host_ssh, jump_auth,
jump_host['addr'], jump_host['port'], sock)
transport = self.jump_host_ssh.get_transport()
dest_addr = (address, port)
jump_addr = (jump_host['addr'], jump_host['port'])
channel = transport.open_channel('direct-tcpip', dest_addr,
jump_addr)
self.connect_with_auth(self.ssh, auth, address, port, channel)
else:
self.connect_with_auth(self.ssh, auth, address, port, sock)
except (AuthenticationException, SSHException,
ChannelException, SocketError) as ex:
raise SSHConnectionError("{0}:{1}".format(address, port), ex) | python | def connect(self, auth, address, port, jump_host, jump_auth):
"""
Creates an ssh session to a remote host
:type auth: :py:class:`margaritashotgun.auth.AuthMethods`
:param auth: Authentication object
:type address: str
:param address: remote server address
:type port: int
:param port: remote server port
"""
try:
self.target_address = address
sock = None
if jump_host is not None:
self.jump_host_ssh = paramiko.SSHClient()
self.jump_host_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.connect_with_auth(self.jump_host_ssh, jump_auth,
jump_host['addr'], jump_host['port'], sock)
transport = self.jump_host_ssh.get_transport()
dest_addr = (address, port)
jump_addr = (jump_host['addr'], jump_host['port'])
channel = transport.open_channel('direct-tcpip', dest_addr,
jump_addr)
self.connect_with_auth(self.ssh, auth, address, port, channel)
else:
self.connect_with_auth(self.ssh, auth, address, port, sock)
except (AuthenticationException, SSHException,
ChannelException, SocketError) as ex:
raise SSHConnectionError("{0}:{1}".format(address, port), ex) | [
"def",
"connect",
"(",
"self",
",",
"auth",
",",
"address",
",",
"port",
",",
"jump_host",
",",
"jump_auth",
")",
":",
"try",
":",
"self",
".",
"target_address",
"=",
"address",
"sock",
"=",
"None",
"if",
"jump_host",
"is",
"not",
"None",
":",
"self",
... | Creates an ssh session to a remote host
:type auth: :py:class:`margaritashotgun.auth.AuthMethods`
:param auth: Authentication object
:type address: str
:param address: remote server address
:type port: int
:param port: remote server port | [
"Creates",
"an",
"ssh",
"session",
"to",
"a",
"remote",
"host"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_shell.py#L36-L66 | train | 211,985 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_shell.py | RemoteShell.connect_with_password | def connect_with_password(self, ssh, username, password, address, port, sock,
timeout=20):
"""
Create an ssh session to a remote host with a username and password
:type username: str
:param username: username used for ssh authentication
:type password: str
:param password: password used for ssh authentication
:type address: str
:param address: remote server address
:type port: int
:param port: remote server port
"""
ssh.connect(username=username,
password=password,
hostname=address,
port=port,
sock=sock,
timeout=timeout) | python | def connect_with_password(self, ssh, username, password, address, port, sock,
timeout=20):
"""
Create an ssh session to a remote host with a username and password
:type username: str
:param username: username used for ssh authentication
:type password: str
:param password: password used for ssh authentication
:type address: str
:param address: remote server address
:type port: int
:param port: remote server port
"""
ssh.connect(username=username,
password=password,
hostname=address,
port=port,
sock=sock,
timeout=timeout) | [
"def",
"connect_with_password",
"(",
"self",
",",
"ssh",
",",
"username",
",",
"password",
",",
"address",
",",
"port",
",",
"sock",
",",
"timeout",
"=",
"20",
")",
":",
"ssh",
".",
"connect",
"(",
"username",
"=",
"username",
",",
"password",
"=",
"pa... | Create an ssh session to a remote host with a username and password
:type username: str
:param username: username used for ssh authentication
:type password: str
:param password: password used for ssh authentication
:type address: str
:param address: remote server address
:type port: int
:param port: remote server port | [
"Create",
"an",
"ssh",
"session",
"to",
"a",
"remote",
"host",
"with",
"a",
"username",
"and",
"password"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_shell.py#L86-L105 | train | 211,986 |
ThreatResponse/margaritashotgun | margaritashotgun/remote_shell.py | RemoteShell.connect_with_key | def connect_with_key(self, ssh, username, key, address, port, sock,
timeout=20):
"""
Create an ssh session to a remote host with a username and rsa key
:type username: str
:param username: username used for ssh authentication
:type key: :py:class:`paramiko.key.RSAKey`
:param key: paramiko rsa key used for ssh authentication
:type address: str
:param address: remote server address
:type port: int
:param port: remote server port
"""
ssh.connect(hostname=address,
port=port,
username=username,
pkey=key,
sock=sock,
timeout=timeout) | python | def connect_with_key(self, ssh, username, key, address, port, sock,
timeout=20):
"""
Create an ssh session to a remote host with a username and rsa key
:type username: str
:param username: username used for ssh authentication
:type key: :py:class:`paramiko.key.RSAKey`
:param key: paramiko rsa key used for ssh authentication
:type address: str
:param address: remote server address
:type port: int
:param port: remote server port
"""
ssh.connect(hostname=address,
port=port,
username=username,
pkey=key,
sock=sock,
timeout=timeout) | [
"def",
"connect_with_key",
"(",
"self",
",",
"ssh",
",",
"username",
",",
"key",
",",
"address",
",",
"port",
",",
"sock",
",",
"timeout",
"=",
"20",
")",
":",
"ssh",
".",
"connect",
"(",
"hostname",
"=",
"address",
",",
"port",
"=",
"port",
",",
"... | Create an ssh session to a remote host with a username and rsa key
:type username: str
:param username: username used for ssh authentication
:type key: :py:class:`paramiko.key.RSAKey`
:param key: paramiko rsa key used for ssh authentication
:type address: str
:param address: remote server address
:type port: int
:param port: remote server port | [
"Create",
"an",
"ssh",
"session",
"to",
"a",
"remote",
"host",
"with",
"a",
"username",
"and",
"rsa",
"key"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_shell.py#L107-L126 | train | 211,987 |
def execute(self, command):
    """
    Run a command on the remote host and return its io streams.

    :type command: str
    :param command: command to be run on remote host
    :returns: dict with keys ``stdin``, ``stdout``, ``stderr`` mapping to
        the paramiko channel streams for the executed command
    :raises SSHConnectionError: when the ssh transport has been closed
    :raises SSHCommandError: when execution fails at the ssh layer
    """
    try:
        # Guard clause: refuse to run on a dead transport.
        if self.ssh.get_transport() is None:
            raise SSHConnectionError(self.target_address,
                                     "ssh transport is closed")
        logger.debug('{0}: executing "{1}"'.format(self.target_address,
                                                   command))
        streams = self.ssh.exec_command(command)
        return {'stdin': streams[0],
                'stdout': streams[1],
                'stderr': streams[2]}
    except (AuthenticationException, SSHException,
            ChannelException, SocketError) as ex:
        logger.critical(("{0} execution failed on {1} with exception:"
                         "{2}".format(command, self.target_address,
                                      ex)))
        raise SSHCommandError(self.target_address, command, ex)
"""
Executes command on remote hosts
:type command: str
:param command: command to be run on remote host
"""
try:
if self.ssh.get_transport() is not None:
logger.debug('{0}: executing "{1}"'.format(self.target_address,
command))
stdin, stdout, stderr = self.ssh.exec_command(command)
return dict(zip(['stdin', 'stdout', 'stderr'],
[stdin, stdout, stderr]))
else:
raise SSHConnectionError(self.target_address,
"ssh transport is closed")
except (AuthenticationException, SSHException,
ChannelException, SocketError) as ex:
logger.critical(("{0} execution failed on {1} with exception:"
"{2}".format(command, self.target_address,
ex)))
raise SSHCommandError(self.target_address, command, ex) | [
"def",
"execute",
"(",
"self",
",",
"command",
")",
":",
"try",
":",
"if",
"self",
".",
"ssh",
".",
"get_transport",
"(",
")",
"is",
"not",
"None",
":",
"logger",
".",
"debug",
"(",
"'{0}: executing \"{1}\"'",
".",
"format",
"(",
"self",
".",
"target_a... | Executes command on remote hosts
:type command: str
:param command: command to be run on remote host | [
"Executes",
"command",
"on",
"remote",
"hosts"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_shell.py#L136-L158 | train | 211,988 |
def execute_async(self, command, callback=None):
    """
    Run a command on the remote host without blocking the caller.

    :type command: str
    :param command: command to be run on remote host
    :type callback: function
    :param callback: function to call when execution completes
    :returns: :py:class:`concurrent.futures.Future` for the execution
    :raises SSHCommandError: when submission fails at the ssh layer
    """
    try:
        logger.debug(('{0}: execute async "{1}"'
                      'with callback {2}'.format(self.target_address,
                                                 command,
                                                 callback)))
        # Delegate the blocking execute() call to the worker pool.
        task = self.executor.submit(self.execute, command)
        if callback is not None:
            task.add_done_callback(callback)
        return task
    except (AuthenticationException, SSHException,
            ChannelException, SocketError) as ex:
        logger.critical(("{0} execution failed on {1} with exception:"
                         "{2}".format(command, self.target_address,
                                      ex)))
        raise SSHCommandError(self.target_address, command, ex)
"""
Executes command on remote hosts without blocking
:type command: str
:param command: command to be run on remote host
:type callback: function
:param callback: function to call when execution completes
"""
try:
logger.debug(('{0}: execute async "{1}"'
'with callback {2}'.format(self.target_address,
command,
callback)))
future = self.executor.submit(self.execute, command)
if callback is not None:
future.add_done_callback(callback)
return future
except (AuthenticationException, SSHException,
ChannelException, SocketError) as ex:
logger.critical(("{0} execution failed on {1} with exception:"
"{2}".format(command, self.target_address,
ex)))
raise SSHCommandError(self.target_address, command, ex) | [
"def",
"execute_async",
"(",
"self",
",",
"command",
",",
"callback",
"=",
"None",
")",
":",
"try",
":",
"logger",
".",
"debug",
"(",
"(",
"'{0}: execute async \"{1}\"'",
"'with callback {2}'",
".",
"format",
"(",
"self",
".",
"target_address",
",",
"command",... | Executes command on remote hosts without blocking
:type command: str
:param command: command to be run on remote host
:type callback: function
:param callback: function to call when execution completes | [
"Executes",
"command",
"on",
"remote",
"hosts",
"without",
"blocking"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_shell.py#L160-L183 | train | 211,989 |
def decode(self, stream, encoding='utf-8'):
    """
    Read a paramiko stream and return its contents as a string.

    :type stream:
    :param stream: stream to convert
    :type encoding: str
    :param encoding: stream encoding
    :returns: decoded stream contents with trailing newlines stripped
    """
    text = stream.read().decode(encoding).strip("\n")
    # Only log when there is actual content to report.
    if text:
        logger.debug(('{0}: decoded "{1}" with encoding '
                      '{2}'.format(self.target_address, text, encoding)))
    return text
"""
Convert paramiko stream into a string
:type stream:
:param stream: stream to convert
:type encoding: str
:param encoding: stream encoding
"""
data = stream.read().decode(encoding).strip("\n")
if data != "":
logger.debug(('{0}: decoded "{1}" with encoding '
'{2}'.format(self.target_address, data, encoding)))
return data | [
"def",
"decode",
"(",
"self",
",",
"stream",
",",
"encoding",
"=",
"'utf-8'",
")",
":",
"data",
"=",
"stream",
".",
"read",
"(",
")",
".",
"decode",
"(",
"encoding",
")",
".",
"strip",
"(",
"\"\\n\"",
")",
"if",
"data",
"!=",
"\"\"",
":",
"logger",... | Convert paramiko stream into a string
:type stream:
:param stream: stream to convert
:type encoding: str
:param encoding: stream encoding | [
"Convert",
"paramiko",
"stream",
"into",
"a",
"string"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_shell.py#L185-L198 | train | 211,990 |
def upload_file(self, local_path, remote_path):
    """
    Upload a file from the local filesystem to the remote host.

    :type local_path: str
    :param local_path: path of local file to upload
    :type remote_path: str
    :param remote_path: destination path of upload on remote host
    """
    logger.debug("{0}: uploading {1} to {0}:{2}".format(self.target_address,
                                                        local_path,
                                                        remote_path))
    try:
        sftp = paramiko.SFTPClient.from_transport(self.transport())
        # Fix: close the sftp channel even when put() raises, so a
        # failed transfer no longer leaks the open channel.
        try:
            sftp.put(local_path, remote_path)
        finally:
            sftp.close()
    except SSHException as ex:
        # Best-effort: failures are logged, not raised (matches callers).
        logger.warn(("{0}: LiME module upload failed with exception:"
                     "{1}".format(self.target_address, ex)))
"""
Upload a file from the local filesystem to the remote host
:type local_path: str
:param local_path: path of local file to upload
:type remote_path: str
:param remote_path: destination path of upload on remote host
"""
logger.debug("{0}: uploading {1} to {0}:{2}".format(self.target_address,
local_path,
remote_path))
try:
sftp = paramiko.SFTPClient.from_transport(self.transport())
sftp.put(local_path, remote_path)
sftp.close()
except SSHException as ex:
logger.warn(("{0}: LiME module upload failed with exception:"
"{1}".format(self.target_address, ex))) | [
"def",
"upload_file",
"(",
"self",
",",
"local_path",
",",
"remote_path",
")",
":",
"logger",
".",
"debug",
"(",
"\"{0}: uploading {1} to {0}:{2}\"",
".",
"format",
"(",
"self",
".",
"target_address",
",",
"local_path",
",",
"remote_path",
")",
")",
"try",
":"... | Upload a file from the local filesystem to the remote host
:type local_path: str
:param local_path: path of local file to upload
:type remote_path: str
:param remote_path: destination path of upload on remote host | [
"Upload",
"a",
"file",
"from",
"the",
"local",
"filesystem",
"to",
"the",
"remote",
"host"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_shell.py#L200-L218 | train | 211,991 |
def cleanup(self):
    """
    Release resources used during shell execution.

    Cancels outstanding futures, shuts down the executor, and closes
    the ssh connection if its transport is still open.
    """
    for future in self.futures:
        future.cancel()
    # Fix: Executor.shutdown takes a boolean ``wait`` flag, not a
    # timeout; the previous ``wait=10`` only worked by being truthy.
    self.executor.shutdown(wait=True)
    if self.ssh.get_transport() is not None:
        self.ssh.close()
"""
Release resources used during shell execution
"""
for future in self.futures:
future.cancel()
self.executor.shutdown(wait=10)
if self.ssh.get_transport() != None:
self.ssh.close() | [
"def",
"cleanup",
"(",
"self",
")",
":",
"for",
"future",
"in",
"self",
".",
"futures",
":",
"future",
".",
"cancel",
"(",
")",
"self",
".",
"executor",
".",
"shutdown",
"(",
"wait",
"=",
"10",
")",
"if",
"self",
".",
"ssh",
".",
"get_transport",
"... | Release resources used during shell execution | [
"Release",
"resources",
"used",
"during",
"shell",
"execution"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/remote_shell.py#L220-L228 | train | 211,992 |
def parse_args(self, args):
    """
    Parse command line arguments and return an arguments object.

    >>> from margaritashotgun.cli import Cli
    >>> cli = Cli()
    >>> cli.parse_args(sys.argv[1:])

    :type args: list
    :param args: list of arguments
    :rtype: argparse.Namespace
    :returns: parsed arguments
    """
    parser = argparse.ArgumentParser(
        description='Remote memory aquisition wrapper for LiME')
    # Exactly one of --config / --server must be given
    # (--version short-circuits and exits).
    root = parser.add_mutually_exclusive_group(required=True)
    root.add_argument('-c', '--config', help='path to config.yml')
    root.add_argument('--server',
                      help='hostname or ip of target server')
    root.add_argument('--version', action='version',
                      version="%(prog)s {ver}".format(ver=__version__))
    # Per-host and repository options; all optional with defaults below.
    opts = parser.add_argument_group()
    opts.add_argument('--port', help='ssh port on remote server')
    opts.add_argument('--username',
                      help='username for ssh connection to target server')
    opts.add_argument('--module',
                      help='path to kernel lime kernel module')
    opts.add_argument('--password',
                      help='password for user or encrypted keyfile')
    opts.add_argument('--key',
                      help='path to rsa key for ssh connection')
    opts.add_argument('--jump-server',
                      help='hostname or ip of jump server')
    opts.add_argument('--jump-port',
                      help='ssh port on jump server')
    opts.add_argument('--jump-username',
                      help='username for ssh connection to jump server')
    opts.add_argument('--jump-password',
                      help='password for jump-user or encrypted keyfile')
    opts.add_argument('--jump-key',
                      help='path to rsa key for ssh connection to jump server')
    opts.add_argument('--filename',
                      help='memory dump filename')
    opts.add_argument('--repository', action='store_true',
                      help='enable automatic kernel module downloads')
    opts.add_argument('--repository-url',
                      help='kernel module repository url')
    opts.add_argument('--repository-manifest',
                      help='specify alternate repository manifest')
    # store_false: passing --gpg-no-verify clears the gpg_verify flag.
    opts.add_argument('--gpg-no-verify', dest='gpg_verify',
                      action='store_false',
                      help='skip lime module gpg signature check')
    opts.add_argument('--workers', default=1,
                      help=('number of workers to run in parallel,'
                            'default: auto acceptable values are'
                            '(INTEGER | "auto")'))
    opts.add_argument('--verbose', action='store_true',
                      help='log debug messages')
    opts.set_defaults(repository_manifest='primary')
    opts.set_defaults(gpg_verify=True)
    # Memory dumps go to exactly one destination: S3 bucket or local dir.
    output = parser.add_mutually_exclusive_group(required=False)
    output.add_argument('--bucket',
                        help='memory dump output bucket')
    output.add_argument('--output-dir',
                        help='memory dump output directory')
    log = parser.add_argument_group()
    log.add_argument('--log-dir',
                     help='log directory')
    log.add_argument('--log-prefix',
                     help='log file prefix')
    return parser.parse_args(args)
"""
Parse arguments and return an arguments object
>>> from margaritashotgun.cli import Cli
>>> cli = CLi()
>>> cli.parse_args(sys.argv[1:])
:type args: list
:param args: list of arguments
"""
parser = argparse.ArgumentParser(
description='Remote memory aquisition wrapper for LiME')
root = parser.add_mutually_exclusive_group(required=True)
root.add_argument('-c', '--config', help='path to config.yml')
root.add_argument('--server',
help='hostname or ip of target server')
root.add_argument('--version', action='version',
version="%(prog)s {ver}".format(ver=__version__))
opts = parser.add_argument_group()
opts.add_argument('--port', help='ssh port on remote server')
opts.add_argument('--username',
help='username for ssh connection to target server')
opts.add_argument('--module',
help='path to kernel lime kernel module')
opts.add_argument('--password',
help='password for user or encrypted keyfile')
opts.add_argument('--key',
help='path to rsa key for ssh connection')
opts.add_argument('--jump-server',
help='hostname or ip of jump server')
opts.add_argument('--jump-port',
help='ssh port on jump server')
opts.add_argument('--jump-username',
help='username for ssh connection to jump server')
opts.add_argument('--jump-password',
help='password for jump-user or encrypted keyfile')
opts.add_argument('--jump-key',
help='path to rsa key for ssh connection to jump server')
opts.add_argument('--filename',
help='memory dump filename')
opts.add_argument('--repository', action='store_true',
help='enable automatic kernel module downloads')
opts.add_argument('--repository-url',
help='kernel module repository url')
opts.add_argument('--repository-manifest',
help='specify alternate repository manifest')
opts.add_argument('--gpg-no-verify', dest='gpg_verify',
action='store_false',
help='skip lime module gpg signature check')
opts.add_argument('--workers', default=1,
help=('number of workers to run in parallel,'
'default: auto acceptable values are'
'(INTEGER | "auto")'))
opts.add_argument('--verbose', action='store_true',
help='log debug messages')
opts.set_defaults(repository_manifest='primary')
opts.set_defaults(gpg_verify=True)
output = parser.add_mutually_exclusive_group(required=False)
output.add_argument('--bucket',
help='memory dump output bucket')
output.add_argument('--output-dir',
help='memory dump output directory')
log = parser.add_argument_group()
log.add_argument('--log-dir',
help='log directory')
log.add_argument('--log-prefix',
help='log file prefix')
return parser.parse_args(args) | [
"def",
"parse_args",
"(",
"self",
",",
"args",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"'Remote memory aquisition wrapper for LiME'",
")",
"root",
"=",
"parser",
".",
"add_mutually_exclusive_group",
"(",
"required",
"=",
... | Parse arguments and return an arguments object
>>> from margaritashotgun.cli import Cli
>>> cli = CLi()
>>> cli.parse_args(sys.argv[1:])
:type args: list
:param args: list of arguments | [
"Parse",
"arguments",
"and",
"return",
"an",
"arguments",
"object"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/cli.py#L39-L111 | train | 211,993 |
def configure(self, arguments=None, config=None):
    """
    Merge command line arguments, config files, and default configs.

    :type arguments: argparse.Namespace
    :params arguments: Arguments produced by Cli.parse_args
    :type config: dict
    :params config: configuration dict to merge and validate
    :returns: merged configuration dict
    """
    # Fix: previously working_config was never bound when both
    # ``arguments`` and ``config`` were None, raising NameError; now
    # the defaults are returned in that case.
    working_config = copy.deepcopy(default_config)
    if arguments is not None:
        args_config = self.configure_args(arguments)
        working_config = self.merge_config(copy.deepcopy(default_config),
                                           args_config)
    if config is not None:
        # When both are supplied the config dict wins, matching the
        # original precedence.
        self.validate_config(config)
        working_config = self.merge_config(copy.deepcopy(default_config),
                                           config)
    # Environment variables override everything else.
    repo = self.get_env_default('LIME_REPOSITORY', 'disabled')
    repo_url = self.get_env_default('LIME_REPOSITORY_URL',
                                    working_config['repository']['url'])
    if repo.lower() == 'enabled':
        working_config['repository']['enabled'] = True
    working_config['repository']['url'] = repo_url
    return working_config
"""
Merge command line arguments, config files, and default configs
:type arguments: argparse.Namespace
:params arguments: Arguments produced by Cli.parse_args
:type config: dict
:params config: configuration dict to merge and validate
"""
if arguments is not None:
args_config = self.configure_args(arguments)
base_config = copy.deepcopy(default_config)
working_config = self.merge_config(base_config, args_config)
if config is not None:
self.validate_config(config)
base_config = copy.deepcopy(default_config)
working_config = self.merge_config(base_config, config)
# override configuration with environment variables
repo = self.get_env_default('LIME_REPOSITORY', 'disabled')
repo_url = self.get_env_default('LIME_REPOSITORY_URL',
working_config['repository']['url'])
if repo.lower() == 'enabled':
working_config['repository']['enabled'] = True
working_config['repository']['url'] = repo_url
return working_config | [
"def",
"configure",
"(",
"self",
",",
"arguments",
"=",
"None",
",",
"config",
"=",
"None",
")",
":",
"if",
"arguments",
"is",
"not",
"None",
":",
"args_config",
"=",
"self",
".",
"configure_args",
"(",
"arguments",
")",
"base_config",
"=",
"copy",
".",
... | Merge command line arguments, config files, and default configs
:type arguments: argparse.Namespace
:params arguments: Arguments produced by Cli.parse_args
:type config: dict
:params config: configuration dict to merge and validate | [
"Merge",
"command",
"line",
"arguments",
"config",
"files",
"and",
"default",
"configs"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/cli.py#L113-L141 | train | 211,994 |
def get_env_default(self, variable, default):
    """
    Look up an environment variable, falling back to ``default``
    when it is not set.
    """
    return os.environ.get(variable, default)
"""
Fetch environment variables, returning a default if not found
"""
if variable in os.environ:
env_var = os.environ[variable]
else:
env_var = default
return env_var | [
"def",
"get_env_default",
"(",
"self",
",",
"variable",
",",
"default",
")",
":",
"if",
"variable",
"in",
"os",
".",
"environ",
":",
"env_var",
"=",
"os",
".",
"environ",
"[",
"variable",
"]",
"else",
":",
"env_var",
"=",
"default",
"return",
"env_var"
] | Fetch environment variables, returning a default if not found | [
"Fetch",
"environment",
"variables",
"returning",
"a",
"default",
"if",
"not",
"found"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/cli.py#L169-L177 | train | 211,995 |
def configure_args(self, arguments):
    """
    Create a configuration hash from command line arguments.

    :type arguments: :py:class:`argparse.Namespace`
    :params arguments: arguments produced by :py:meth:`Cli.parse_args()`
    :rtype: dict
    :returns: configuration dict assembled from the arguments, merged
        with the config file contents when ``--config`` was supplied
    :raises OSError: when a supplied module/key/config path is missing
    """
    # Validate that any supplied file paths actually exist.
    module, key, config_path = self.check_file_paths(arguments.module,
                                                     arguments.key,
                                                     arguments.config)
    # NOTE(review): return value is unused; the call is made for its
    # path-validation side effect (raises on a missing directory).
    log_dir = self.check_directory_paths(arguments.log_dir)
    if arguments.repository_url is None:
        url = default_config['repository']['url']
    else:
        url = arguments.repository_url
    args_config = dict(aws=dict(bucket=arguments.bucket),
                       logging=dict(dir=arguments.log_dir,
                                    prefix=arguments.log_prefix),
                       workers=arguments.workers,
                       repository=dict(enabled=arguments.repository,
                                       url=url,
                                       manifest=arguments.repository_manifest,
                                       gpg_verify=arguments.gpg_verify))
    if arguments.server is not None:
        # Build an optional jump-host dict keyed by jump_host_allowed_keys.
        jump_host = None
        if arguments.jump_server is not None:
            if arguments.jump_port is not None:
                jump_port = int(arguments.jump_port)
            else:
                jump_port = None
            jump_host = dict(zip(jump_host_allowed_keys,
                                 [arguments.jump_server,
                                  jump_port,
                                  arguments.jump_username,
                                  arguments.jump_password,
                                  arguments.jump_key]))
        if arguments.port is not None:
            port = int(arguments.port)
        else:
            port = None
        # Single-host config built from the CLI flags.
        host = dict(zip(host_allowed_keys,
                        [arguments.server, port, arguments.username,
                         arguments.password, module, key,
                         arguments.filename, jump_host]))
        args_config['hosts'] = []
        args_config['hosts'].append(host)
    if config_path is not None:
        # A config file, when given, overrides the CLI-derived values.
        try:
            config = self.load_config(config_path)
            self.validate_config(config)
            args_config.update(config)
        except YAMLError as ex:
            logger.warn('Invalid yaml Format: {0}'.format(ex))
            raise
        except InvalidConfigurationError as ex:
            logger.warn(ex)
            raise
    return args_config
"""
Create configuration has from command line arguments
:type arguments: :py:class:`argparse.Namespace`
:params arguments: arguments produced by :py:meth:`Cli.parse_args()`
"""
module, key, config_path = self.check_file_paths(arguments.module,
arguments.key,
arguments.config)
log_dir = self.check_directory_paths(arguments.log_dir)
if arguments.repository_url is None:
url = default_config['repository']['url']
else:
url = arguments.repository_url
args_config = dict(aws=dict(bucket=arguments.bucket),
logging=dict(dir=arguments.log_dir,
prefix=arguments.log_prefix),
workers=arguments.workers,
repository=dict(enabled=arguments.repository,
url=url,
manifest=arguments.repository_manifest,
gpg_verify=arguments.gpg_verify))
if arguments.server is not None:
jump_host = None
if arguments.jump_server is not None:
if arguments.jump_port is not None:
jump_port = int(arguments.jump_port)
else:
jump_port = None
jump_host = dict(zip(jump_host_allowed_keys,
[arguments.jump_server,
jump_port,
arguments.jump_username,
arguments.jump_password,
arguments.jump_key]))
if arguments.port is not None:
port = int(arguments.port)
else:
port = None
host = dict(zip(host_allowed_keys,
[arguments.server, port, arguments.username,
arguments.password, module, key,
arguments.filename, jump_host]))
args_config['hosts'] = []
args_config['hosts'].append(host)
if config_path is not None:
try:
config = self.load_config(config_path)
self.validate_config(config)
args_config.update(config)
except YAMLError as ex:
logger.warn('Invalid yaml Format: {0}'.format(ex))
raise
except InvalidConfigurationError as ex:
logger.warn(ex)
raise
return args_config | [
"def",
"configure_args",
"(",
"self",
",",
"arguments",
")",
":",
"module",
",",
"key",
",",
"config_path",
"=",
"self",
".",
"check_file_paths",
"(",
"arguments",
".",
"module",
",",
"arguments",
".",
"key",
",",
"arguments",
".",
"config",
")",
"log_dir"... | Create configuration has from command line arguments
:type arguments: :py:class:`argparse.Namespace`
:params arguments: arguments produced by :py:meth:`Cli.parse_args()` | [
"Create",
"configuration",
"has",
"from",
"command",
"line",
"arguments"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/cli.py#L179-L244 | train | 211,996 |
def check_file_paths(self, *args):
    """
    Ensure all non-None arguments correspond to an existing file.

    :returns: the arguments, unchanged, as a tuple
    :raises OSError: when any non-None path does not exist
    """
    # Fix: iterate the values directly instead of the
    # ``for path in enumerate(args): path = path[1]`` anti-idiom.
    for path in args:
        if path is not None:
            try:
                self.check_file_path(path)
            except OSError as ex:
                logger.warn(ex)
                raise
    return args
"""
Ensure all arguments provided correspond to a file
"""
for path in enumerate(args):
path = path[1]
if path is not None:
try:
self.check_file_path(path)
except OSError as ex:
logger.warn(ex)
raise
return args | [
"def",
"check_file_paths",
"(",
"self",
",",
"*",
"args",
")",
":",
"for",
"path",
"in",
"enumerate",
"(",
"args",
")",
":",
"path",
"=",
"path",
"[",
"1",
"]",
"if",
"path",
"is",
"not",
"None",
":",
"try",
":",
"self",
".",
"check_file_path",
"("... | Ensure all arguments provided correspond to a file | [
"Ensure",
"all",
"arguments",
"provided",
"correspond",
"to",
"a",
"file"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/cli.py#L246-L258 | train | 211,997 |
def check_file_path(self, path):
    """
    Ensure a file exists at the provided path.

    :type path: string
    :param path: path to file to check
    :raises OSError: when nothing exists at ``path``
    """
    # Fix: plain boolean test instead of the ``is not True`` identity
    # comparison against os.path.exists()'s return value.
    if not os.path.exists(path):
        raise OSError("File Not Found {}".format(path))
"""
Ensure file exists at the provided path
:type path: string
:param path: path to directory to check
"""
if os.path.exists(path) is not True:
msg = "File Not Found {}".format(path)
raise OSError(msg) | [
"def",
"check_file_path",
"(",
"self",
",",
"path",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
"is",
"not",
"True",
":",
"msg",
"=",
"\"File Not Found {}\"",
".",
"format",
"(",
"path",
")",
"raise",
"OSError",
"(",
"msg",
")... | Ensure file exists at the provided path
:type path: string
:param path: path to directory to check | [
"Ensure",
"file",
"exists",
"at",
"the",
"provided",
"path"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/cli.py#L260-L269 | train | 211,998 |
def check_directory_paths(self, *args):
    """
    Ensure all non-None arguments correspond to existing directories.

    :returns: the arguments, unchanged, as a tuple
    :raises OSError: when any non-None path fails the directory check
    """
    # Fix: iterate the values directly instead of the
    # ``for path in enumerate(args): path = path[1]`` anti-idiom.
    for path in args:
        if path is not None:
            try:
                self.check_directory_path(path)
            except OSError as ex:
                logger.warn(ex)
                raise
    return args
"""
Ensure all arguments correspond to directories
"""
for path in enumerate(args):
path = path[1]
if path is not None:
try:
self.check_directory_path(path)
except OSError as ex:
logger.warn(ex)
raise
return args | [
"def",
"check_directory_paths",
"(",
"self",
",",
"*",
"args",
")",
":",
"for",
"path",
"in",
"enumerate",
"(",
"args",
")",
":",
"path",
"=",
"path",
"[",
"1",
"]",
"if",
"path",
"is",
"not",
"None",
":",
"try",
":",
"self",
".",
"check_directory_pa... | Ensure all arguments correspond to directories | [
"Ensure",
"all",
"arguments",
"correspond",
"to",
"directories"
] | 6dee53ef267959b214953439968244cc46a19690 | https://github.com/ThreatResponse/margaritashotgun/blob/6dee53ef267959b214953439968244cc46a19690/margaritashotgun/cli.py#L271-L283 | train | 211,999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.