id int32 0 252k | repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 51 19.8k | code_tokens list | docstring stringlengths 3 17.3k | docstring_tokens list | sha stringlengths 40 40 | url stringlengths 87 242 |
|---|---|---|---|---|---|---|---|---|---|---|---|
27,800 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.delete | def delete(self):
"""Delete this document"""
assert '_id' in self._document, "Can't delete documents without `_id`"
# Send delete signal
signal('delete').send(self.__class__, frames=[self])
# Delete the document
self.get_collection().delete_one({'_id': self._id})
# Send deleted signal
signal('deleted').send(self.__class__, frames=[self]) | python | def delete(self):
assert '_id' in self._document, "Can't delete documents without `_id`"
# Send delete signal
signal('delete').send(self.__class__, frames=[self])
# Delete the document
self.get_collection().delete_one({'_id': self._id})
# Send deleted signal
signal('deleted').send(self.__class__, frames=[self]) | [
"def",
"delete",
"(",
"self",
")",
":",
"assert",
"'_id'",
"in",
"self",
".",
"_document",
",",
"\"Can't delete documents without `_id`\"",
"# Send delete signal",
"signal",
"(",
"'delete'",
")",
".",
"send",
"(",
"self",
".",
"__class__",
",",
"frames",
"=",
... | Delete this document | [
"Delete",
"this",
"document"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L290-L302 |
27,801 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.insert_many | def insert_many(cls, documents):
"""Insert a list of documents"""
from mongoframes.queries import to_refs
# Ensure all documents have been converted to frames
frames = cls._ensure_frames(documents)
# Send insert signal
signal('insert').send(cls, frames=frames)
# Prepare the documents to be inserted
documents = [to_refs(f._document) for f in frames]
# Bulk insert
ids = cls.get_collection().insert_many(documents).inserted_ids
# Apply the Ids to the frames
for i, id in enumerate(ids):
frames[i]._id = id
# Send inserted signal
signal('inserted').send(cls, frames=frames)
return frames | python | def insert_many(cls, documents):
from mongoframes.queries import to_refs
# Ensure all documents have been converted to frames
frames = cls._ensure_frames(documents)
# Send insert signal
signal('insert').send(cls, frames=frames)
# Prepare the documents to be inserted
documents = [to_refs(f._document) for f in frames]
# Bulk insert
ids = cls.get_collection().insert_many(documents).inserted_ids
# Apply the Ids to the frames
for i, id in enumerate(ids):
frames[i]._id = id
# Send inserted signal
signal('inserted').send(cls, frames=frames)
return frames | [
"def",
"insert_many",
"(",
"cls",
",",
"documents",
")",
":",
"from",
"mongoframes",
".",
"queries",
"import",
"to_refs",
"# Ensure all documents have been converted to frames",
"frames",
"=",
"cls",
".",
"_ensure_frames",
"(",
"documents",
")",
"# Send insert signal",
... | Insert a list of documents | [
"Insert",
"a",
"list",
"of",
"documents"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L305-L328 |
27,802 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.update_many | def update_many(cls, documents, *fields):
"""
Update multiple documents. Optionally a specific list of fields to
update can be specified.
"""
from mongoframes.queries import to_refs
# Ensure all documents have been converted to frames
frames = cls._ensure_frames(documents)
all_count = len(documents)
assert len([f for f in frames if '_id' in f._document]) == all_count, \
"Can't update documents without `_id`s"
# Send update signal
signal('update').send(cls, frames=frames)
# Prepare the documents to be updated
# Check for selective updates
if len(fields) > 0:
documents = []
for frame in frames:
document = {'_id': frame._id}
for field in fields:
document[field] = cls._path_to_value(
field,
frame._document
)
documents.append(to_refs(document))
else:
documents = [to_refs(f._document) for f in frames]
# Update the documents
for document in documents:
_id = document.pop('_id')
cls.get_collection().update(
{'_id': _id}, {'$set': document})
# Send updated signal
signal('updated').send(cls, frames=frames) | python | def update_many(cls, documents, *fields):
from mongoframes.queries import to_refs
# Ensure all documents have been converted to frames
frames = cls._ensure_frames(documents)
all_count = len(documents)
assert len([f for f in frames if '_id' in f._document]) == all_count, \
"Can't update documents without `_id`s"
# Send update signal
signal('update').send(cls, frames=frames)
# Prepare the documents to be updated
# Check for selective updates
if len(fields) > 0:
documents = []
for frame in frames:
document = {'_id': frame._id}
for field in fields:
document[field] = cls._path_to_value(
field,
frame._document
)
documents.append(to_refs(document))
else:
documents = [to_refs(f._document) for f in frames]
# Update the documents
for document in documents:
_id = document.pop('_id')
cls.get_collection().update(
{'_id': _id}, {'$set': document})
# Send updated signal
signal('updated').send(cls, frames=frames) | [
"def",
"update_many",
"(",
"cls",
",",
"documents",
",",
"*",
"fields",
")",
":",
"from",
"mongoframes",
".",
"queries",
"import",
"to_refs",
"# Ensure all documents have been converted to frames",
"frames",
"=",
"cls",
".",
"_ensure_frames",
"(",
"documents",
")",
... | Update multiple documents. Optionally a specific list of fields to
update can be specified. | [
"Update",
"multiple",
"documents",
".",
"Optionally",
"a",
"specific",
"list",
"of",
"fields",
"to",
"update",
"can",
"be",
"specified",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L331-L371 |
27,803 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.delete_many | def delete_many(cls, documents):
"""Delete multiple documents"""
# Ensure all documents have been converted to frames
frames = cls._ensure_frames(documents)
all_count = len(documents)
assert len([f for f in frames if '_id' in f._document]) == all_count, \
"Can't delete documents without `_id`s"
# Send delete signal
signal('delete').send(cls, frames=frames)
# Prepare the documents to be deleted
ids = [f._id for f in frames]
# Delete the documents
cls.get_collection().delete_many({'_id': {'$in': ids}})
# Send deleted signal
signal('deleted').send(cls, frames=frames) | python | def delete_many(cls, documents):
# Ensure all documents have been converted to frames
frames = cls._ensure_frames(documents)
all_count = len(documents)
assert len([f for f in frames if '_id' in f._document]) == all_count, \
"Can't delete documents without `_id`s"
# Send delete signal
signal('delete').send(cls, frames=frames)
# Prepare the documents to be deleted
ids = [f._id for f in frames]
# Delete the documents
cls.get_collection().delete_many({'_id': {'$in': ids}})
# Send deleted signal
signal('deleted').send(cls, frames=frames) | [
"def",
"delete_many",
"(",
"cls",
",",
"documents",
")",
":",
"# Ensure all documents have been converted to frames",
"frames",
"=",
"cls",
".",
"_ensure_frames",
"(",
"documents",
")",
"all_count",
"=",
"len",
"(",
"documents",
")",
"assert",
"len",
"(",
"[",
"... | Delete multiple documents | [
"Delete",
"multiple",
"documents"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L374-L394 |
27,804 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame._ensure_frames | def _ensure_frames(cls, documents):
"""
Ensure all items in a list are frames by converting those that aren't.
"""
frames = []
for document in documents:
if not isinstance(document, Frame):
frames.append(cls(document))
else:
frames.append(document)
return frames | python | def _ensure_frames(cls, documents):
frames = []
for document in documents:
if not isinstance(document, Frame):
frames.append(cls(document))
else:
frames.append(document)
return frames | [
"def",
"_ensure_frames",
"(",
"cls",
",",
"documents",
")",
":",
"frames",
"=",
"[",
"]",
"for",
"document",
"in",
"documents",
":",
"if",
"not",
"isinstance",
"(",
"document",
",",
"Frame",
")",
":",
"frames",
".",
"append",
"(",
"cls",
"(",
"document... | Ensure all items in a list are frames by converting those that aren't. | [
"Ensure",
"all",
"items",
"in",
"a",
"list",
"are",
"frames",
"by",
"converting",
"those",
"that",
"aren",
"t",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L397-L407 |
27,805 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.reload | def reload(self, **kwargs):
"""Reload the document"""
frame = self.one({'_id': self._id}, **kwargs)
self._document = frame._document | python | def reload(self, **kwargs):
frame = self.one({'_id': self._id}, **kwargs)
self._document = frame._document | [
"def",
"reload",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"frame",
"=",
"self",
".",
"one",
"(",
"{",
"'_id'",
":",
"self",
".",
"_id",
"}",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"_document",
"=",
"frame",
".",
"_document"
] | Reload the document | [
"Reload",
"the",
"document"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L411-L414 |
27,806 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.count | def count(cls, filter=None, **kwargs):
"""Return a count of documents matching the filter"""
from mongoframes.queries import Condition, Group, to_refs
if isinstance(filter, (Condition, Group)):
filter = filter.to_dict()
return cls.get_collection().count(to_refs(filter), **kwargs) | python | def count(cls, filter=None, **kwargs):
from mongoframes.queries import Condition, Group, to_refs
if isinstance(filter, (Condition, Group)):
filter = filter.to_dict()
return cls.get_collection().count(to_refs(filter), **kwargs) | [
"def",
"count",
"(",
"cls",
",",
"filter",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"mongoframes",
".",
"queries",
"import",
"Condition",
",",
"Group",
",",
"to_refs",
"if",
"isinstance",
"(",
"filter",
",",
"(",
"Condition",
",",
"Group... | Return a count of documents matching the filter | [
"Return",
"a",
"count",
"of",
"documents",
"matching",
"the",
"filter"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L422-L429 |
27,807 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.ids | def ids(cls, filter=None, **kwargs):
"""Return a list of Ids for documents matching the filter"""
from mongoframes.queries import Condition, Group, to_refs
# Find the documents
if isinstance(filter, (Condition, Group)):
filter = filter.to_dict()
documents = cls.get_collection().find(
to_refs(filter),
projection={'_id': True},
**kwargs
)
return [d['_id'] for d in list(documents)] | python | def ids(cls, filter=None, **kwargs):
from mongoframes.queries import Condition, Group, to_refs
# Find the documents
if isinstance(filter, (Condition, Group)):
filter = filter.to_dict()
documents = cls.get_collection().find(
to_refs(filter),
projection={'_id': True},
**kwargs
)
return [d['_id'] for d in list(documents)] | [
"def",
"ids",
"(",
"cls",
",",
"filter",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"mongoframes",
".",
"queries",
"import",
"Condition",
",",
"Group",
",",
"to_refs",
"# Find the documents",
"if",
"isinstance",
"(",
"filter",
",",
"(",
"Con... | Return a list of Ids for documents matching the filter | [
"Return",
"a",
"list",
"of",
"Ids",
"for",
"documents",
"matching",
"the",
"filter"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L432-L446 |
27,808 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.one | def one(cls, filter=None, **kwargs):
"""Return the first document matching the filter"""
from mongoframes.queries import Condition, Group, to_refs
# Flatten the projection
kwargs['projection'], references, subs = \
cls._flatten_projection(
kwargs.get('projection', cls._default_projection)
)
# Find the document
if isinstance(filter, (Condition, Group)):
filter = filter.to_dict()
document = cls.get_collection().find_one(to_refs(filter), **kwargs)
# Make sure we found a document
if not document:
return
# Dereference the document (if required)
if references:
cls._dereference([document], references)
# Add sub-frames to the document (if required)
if subs:
cls._apply_sub_frames([document], subs)
return cls(document) | python | def one(cls, filter=None, **kwargs):
from mongoframes.queries import Condition, Group, to_refs
# Flatten the projection
kwargs['projection'], references, subs = \
cls._flatten_projection(
kwargs.get('projection', cls._default_projection)
)
# Find the document
if isinstance(filter, (Condition, Group)):
filter = filter.to_dict()
document = cls.get_collection().find_one(to_refs(filter), **kwargs)
# Make sure we found a document
if not document:
return
# Dereference the document (if required)
if references:
cls._dereference([document], references)
# Add sub-frames to the document (if required)
if subs:
cls._apply_sub_frames([document], subs)
return cls(document) | [
"def",
"one",
"(",
"cls",
",",
"filter",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"mongoframes",
".",
"queries",
"import",
"Condition",
",",
"Group",
",",
"to_refs",
"# Flatten the projection",
"kwargs",
"[",
"'projection'",
"]",
",",
"refer... | Return the first document matching the filter | [
"Return",
"the",
"first",
"document",
"matching",
"the",
"filter"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L449-L477 |
27,809 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.many | def many(cls, filter=None, **kwargs):
"""Return a list of documents matching the filter"""
from mongoframes.queries import Condition, Group, to_refs
# Flatten the projection
kwargs['projection'], references, subs = \
cls._flatten_projection(
kwargs.get('projection', cls._default_projection)
)
# Find the documents
if isinstance(filter, (Condition, Group)):
filter = filter.to_dict()
documents = list(cls.get_collection().find(to_refs(filter), **kwargs))
# Dereference the documents (if required)
if references:
cls._dereference(documents, references)
# Add sub-frames to the documents (if required)
if subs:
cls._apply_sub_frames(documents, subs)
return [cls(d) for d in documents] | python | def many(cls, filter=None, **kwargs):
from mongoframes.queries import Condition, Group, to_refs
# Flatten the projection
kwargs['projection'], references, subs = \
cls._flatten_projection(
kwargs.get('projection', cls._default_projection)
)
# Find the documents
if isinstance(filter, (Condition, Group)):
filter = filter.to_dict()
documents = list(cls.get_collection().find(to_refs(filter), **kwargs))
# Dereference the documents (if required)
if references:
cls._dereference(documents, references)
# Add sub-frames to the documents (if required)
if subs:
cls._apply_sub_frames(documents, subs)
return [cls(d) for d in documents] | [
"def",
"many",
"(",
"cls",
",",
"filter",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"mongoframes",
".",
"queries",
"import",
"Condition",
",",
"Group",
",",
"to_refs",
"# Flatten the projection",
"kwargs",
"[",
"'projection'",
"]",
",",
"refe... | Return a list of documents matching the filter | [
"Return",
"a",
"list",
"of",
"documents",
"matching",
"the",
"filter"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L480-L504 |
27,810 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame._apply_sub_frames | def _apply_sub_frames(cls, documents, subs):
"""Convert embedded documents to sub-frames for one or more documents"""
# Dereference each reference
for path, projection in subs.items():
# Get the SubFrame class we'll use to wrap the embedded document
sub = None
expect_map = False
if '$sub' in projection:
sub = projection.pop('$sub')
elif '$sub.' in projection:
sub = projection.pop('$sub.')
expect_map = True
else:
continue
# Add sub-frames to the documents
raw_subs = []
for document in documents:
value = cls._path_to_value(path, document)
if value is None:
continue
if isinstance(value, dict):
if expect_map:
# Dictionary of embedded documents
raw_subs += value.values()
for k, v in value.items():
if isinstance(v ,list):
value[k] = [
sub(u) for u in v if isinstance(u, dict)]
else:
value[k] = sub(v)
# Single embedded document
else:
raw_subs.append(value)
value = sub(value)
elif isinstance(value, list):
# List of embedded documents
raw_subs += value
value = [sub(v) for v in value if isinstance(v, dict)]
else:
raise TypeError('Not a supported sub-frame type')
child_document = document
keys = cls._path_to_keys(path)
for key in keys[:-1]:
child_document = child_document[key]
child_document[keys[-1]] = value
# Apply the projection to the list of sub frames
if projection:
sub._apply_projection(raw_subs, projection) | python | def _apply_sub_frames(cls, documents, subs):
# Dereference each reference
for path, projection in subs.items():
# Get the SubFrame class we'll use to wrap the embedded document
sub = None
expect_map = False
if '$sub' in projection:
sub = projection.pop('$sub')
elif '$sub.' in projection:
sub = projection.pop('$sub.')
expect_map = True
else:
continue
# Add sub-frames to the documents
raw_subs = []
for document in documents:
value = cls._path_to_value(path, document)
if value is None:
continue
if isinstance(value, dict):
if expect_map:
# Dictionary of embedded documents
raw_subs += value.values()
for k, v in value.items():
if isinstance(v ,list):
value[k] = [
sub(u) for u in v if isinstance(u, dict)]
else:
value[k] = sub(v)
# Single embedded document
else:
raw_subs.append(value)
value = sub(value)
elif isinstance(value, list):
# List of embedded documents
raw_subs += value
value = [sub(v) for v in value if isinstance(v, dict)]
else:
raise TypeError('Not a supported sub-frame type')
child_document = document
keys = cls._path_to_keys(path)
for key in keys[:-1]:
child_document = child_document[key]
child_document[keys[-1]] = value
# Apply the projection to the list of sub frames
if projection:
sub._apply_projection(raw_subs, projection) | [
"def",
"_apply_sub_frames",
"(",
"cls",
",",
"documents",
",",
"subs",
")",
":",
"# Dereference each reference",
"for",
"path",
",",
"projection",
"in",
"subs",
".",
"items",
"(",
")",
":",
"# Get the SubFrame class we'll use to wrap the embedded document",
"sub",
"="... | Convert embedded documents to sub-frames for one or more documents | [
"Convert",
"embedded",
"documents",
"to",
"sub",
"-",
"frames",
"for",
"one",
"or",
"more",
"documents"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L507-L563 |
27,811 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame._dereference | def _dereference(cls, documents, references):
"""Dereference one or more documents"""
# Dereference each reference
for path, projection in references.items():
# Check there is a $ref in the projection, else skip it
if '$ref' not in projection:
continue
# Collect Ids of documents to dereference
ids = set()
for document in documents:
value = cls._path_to_value(path, document)
if not value:
continue
if isinstance(value, list):
ids.update(value)
elif isinstance(value, dict):
ids.update(value.values())
else:
ids.add(value)
# Find the referenced documents
ref = projection.pop('$ref')
frames = ref.many(
{'_id': {'$in': list(ids)}},
projection=projection
)
frames = {f._id: f for f in frames}
# Add dereferenced frames to the document
for document in documents:
value = cls._path_to_value(path, document)
if not value:
continue
if isinstance(value, list):
# List of references
value = [frames[id] for id in value if id in frames]
elif isinstance(value, dict):
# Dictionary of references
value = {key: frames.get(id) for key, id in value.items()}
else:
value = frames.get(value, None)
child_document = document
keys = cls._path_to_keys(path)
for key in keys[:-1]:
child_document = child_document[key]
child_document[keys[-1]] = value | python | def _dereference(cls, documents, references):
# Dereference each reference
for path, projection in references.items():
# Check there is a $ref in the projection, else skip it
if '$ref' not in projection:
continue
# Collect Ids of documents to dereference
ids = set()
for document in documents:
value = cls._path_to_value(path, document)
if not value:
continue
if isinstance(value, list):
ids.update(value)
elif isinstance(value, dict):
ids.update(value.values())
else:
ids.add(value)
# Find the referenced documents
ref = projection.pop('$ref')
frames = ref.many(
{'_id': {'$in': list(ids)}},
projection=projection
)
frames = {f._id: f for f in frames}
# Add dereferenced frames to the document
for document in documents:
value = cls._path_to_value(path, document)
if not value:
continue
if isinstance(value, list):
# List of references
value = [frames[id] for id in value if id in frames]
elif isinstance(value, dict):
# Dictionary of references
value = {key: frames.get(id) for key, id in value.items()}
else:
value = frames.get(value, None)
child_document = document
keys = cls._path_to_keys(path)
for key in keys[:-1]:
child_document = child_document[key]
child_document[keys[-1]] = value | [
"def",
"_dereference",
"(",
"cls",
",",
"documents",
",",
"references",
")",
":",
"# Dereference each reference",
"for",
"path",
",",
"projection",
"in",
"references",
".",
"items",
"(",
")",
":",
"# Check there is a $ref in the projection, else skip it",
"if",
"'$ref... | Dereference one or more documents | [
"Dereference",
"one",
"or",
"more",
"documents"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L566-L621 |
27,812 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.listen | def listen(cls, event, func):
"""Add a callback for a signal against the class"""
signal(event).connect(func, sender=cls) | python | def listen(cls, event, func):
signal(event).connect(func, sender=cls) | [
"def",
"listen",
"(",
"cls",
",",
"event",
",",
"func",
")",
":",
"signal",
"(",
"event",
")",
".",
"connect",
"(",
"func",
",",
"sender",
"=",
"cls",
")"
] | Add a callback for a signal against the class | [
"Add",
"a",
"callback",
"for",
"a",
"signal",
"against",
"the",
"class"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L754-L756 |
27,813 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.stop_listening | def stop_listening(cls, event, func):
"""Remove a callback for a signal against the class"""
signal(event).disconnect(func, sender=cls) | python | def stop_listening(cls, event, func):
signal(event).disconnect(func, sender=cls) | [
"def",
"stop_listening",
"(",
"cls",
",",
"event",
",",
"func",
")",
":",
"signal",
"(",
"event",
")",
".",
"disconnect",
"(",
"func",
",",
"sender",
"=",
"cls",
")"
] | Remove a callback for a signal against the class | [
"Remove",
"a",
"callback",
"for",
"a",
"signal",
"against",
"the",
"class"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L759-L761 |
27,814 | GetmeUK/MongoFrames | mongoframes/frames.py | Frame.get_db | def get_db(cls):
"""Return the database for the collection"""
if cls._db:
return getattr(cls._client, cls._db)
return cls._client.get_default_database() | python | def get_db(cls):
if cls._db:
return getattr(cls._client, cls._db)
return cls._client.get_default_database() | [
"def",
"get_db",
"(",
"cls",
")",
":",
"if",
"cls",
".",
"_db",
":",
"return",
"getattr",
"(",
"cls",
".",
"_client",
",",
"cls",
".",
"_db",
")",
"return",
"cls",
".",
"_client",
".",
"get_default_database",
"(",
")"
] | Return the database for the collection | [
"Return",
"the",
"database",
"for",
"the",
"collection"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/frames.py#L771-L775 |
27,815 | GetmeUK/MongoFrames | mongoframes/factory/makers/images.py | ImageURL._default_service_formatter | def _default_service_formatter(
service_url,
width,
height,
background,
foreground,
options
):
"""Generate an image URL for a service"""
# Build the base URL
image_tmp = '{service_url}/{width}x{height}/{background}/{foreground}/'
image_url = image_tmp.format(
service_url=service_url,
width=width,
height=height,
background=background,
foreground=foreground
)
# Add any options
if options:
image_url += '?' + urlencode(options)
return image_url | python | def _default_service_formatter(
service_url,
width,
height,
background,
foreground,
options
):
# Build the base URL
image_tmp = '{service_url}/{width}x{height}/{background}/{foreground}/'
image_url = image_tmp.format(
service_url=service_url,
width=width,
height=height,
background=background,
foreground=foreground
)
# Add any options
if options:
image_url += '?' + urlencode(options)
return image_url | [
"def",
"_default_service_formatter",
"(",
"service_url",
",",
"width",
",",
"height",
",",
"background",
",",
"foreground",
",",
"options",
")",
":",
"# Build the base URL",
"image_tmp",
"=",
"'{service_url}/{width}x{height}/{background}/{foreground}/'",
"image_url",
"=",
... | Generate an image URL for a service | [
"Generate",
"an",
"image",
"URL",
"for",
"a",
"service"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/makers/images.py#L55-L79 |
27,816 | GetmeUK/MongoFrames | mongoframes/factory/makers/text.py | Markov._body | def _body(self, paragraphs):
"""Generate a body of text"""
body = []
for i in range(paragraphs):
paragraph = self._paragraph(random.randint(1, 10))
body.append(paragraph)
return '\n'.join(body) | python | def _body(self, paragraphs):
body = []
for i in range(paragraphs):
paragraph = self._paragraph(random.randint(1, 10))
body.append(paragraph)
return '\n'.join(body) | [
"def",
"_body",
"(",
"self",
",",
"paragraphs",
")",
":",
"body",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"paragraphs",
")",
":",
"paragraph",
"=",
"self",
".",
"_paragraph",
"(",
"random",
".",
"randint",
"(",
"1",
",",
"10",
")",
")",
"b... | Generate a body of text | [
"Generate",
"a",
"body",
"of",
"text"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/makers/text.py#L193-L200 |
27,817 | GetmeUK/MongoFrames | mongoframes/factory/makers/text.py | Markov._paragraph | def _paragraph(self, sentences):
"""Generate a paragraph"""
paragraph = []
for i in range(sentences):
sentence = self._sentence(random.randint(5, 16))
paragraph.append(sentence)
return ' '.join(paragraph) | python | def _paragraph(self, sentences):
paragraph = []
for i in range(sentences):
sentence = self._sentence(random.randint(5, 16))
paragraph.append(sentence)
return ' '.join(paragraph) | [
"def",
"_paragraph",
"(",
"self",
",",
"sentences",
")",
":",
"paragraph",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"sentences",
")",
":",
"sentence",
"=",
"self",
".",
"_sentence",
"(",
"random",
".",
"randint",
"(",
"5",
",",
"16",
")",
")"... | Generate a paragraph | [
"Generate",
"a",
"paragraph"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/makers/text.py#L202-L209 |
27,818 | GetmeUK/MongoFrames | mongoframes/factory/makers/text.py | Markov._sentence | def _sentence(self, words):
"""Generate a sentence"""
db = self.database
# Generate 2 words to start a sentence with
seed = random.randint(0, db['word_count'] - 3)
seed_word, next_word = db['words'][seed], db['words'][seed + 1]
w1, w2 = seed_word, next_word
# Generate the complete sentence
sentence = []
for i in range(0, words - 1):
sentence.append(w1)
w1, w2 = w2, random.choice(db['freqs'][(w1, w2)])
sentence.append(w2)
# Make the sentence respectable
sentence = ' '.join(sentence)
# Capitalize the sentence
sentence = sentence.capitalize()
# Remove additional sentence ending puntuation
sentence = sentence.replace('.', '')
sentence = sentence.replace('!', '')
sentence = sentence.replace('?', '')
sentence = sentence.replace(':', '')
# Remove quote tags
sentence = sentence.replace('.', '')
sentence = sentence.replace('!', '')
sentence = sentence.replace('?', '')
sentence = sentence.replace(':', '')
sentence = sentence.replace('"', '')
# If the last character is not an alphanumeric remove it
sentence = re.sub('[^a-zA-Z0-9]$', '', sentence)
# Remove excess space
sentence = re.sub('\s+', ' ', sentence)
# Add a full stop
sentence += '.'
return sentence | python | def _sentence(self, words):
db = self.database
# Generate 2 words to start a sentence with
seed = random.randint(0, db['word_count'] - 3)
seed_word, next_word = db['words'][seed], db['words'][seed + 1]
w1, w2 = seed_word, next_word
# Generate the complete sentence
sentence = []
for i in range(0, words - 1):
sentence.append(w1)
w1, w2 = w2, random.choice(db['freqs'][(w1, w2)])
sentence.append(w2)
# Make the sentence respectable
sentence = ' '.join(sentence)
# Capitalize the sentence
sentence = sentence.capitalize()
# Remove additional sentence ending puntuation
sentence = sentence.replace('.', '')
sentence = sentence.replace('!', '')
sentence = sentence.replace('?', '')
sentence = sentence.replace(':', '')
# Remove quote tags
sentence = sentence.replace('.', '')
sentence = sentence.replace('!', '')
sentence = sentence.replace('?', '')
sentence = sentence.replace(':', '')
sentence = sentence.replace('"', '')
# If the last character is not an alphanumeric remove it
sentence = re.sub('[^a-zA-Z0-9]$', '', sentence)
# Remove excess space
sentence = re.sub('\s+', ' ', sentence)
# Add a full stop
sentence += '.'
return sentence | [
"def",
"_sentence",
"(",
"self",
",",
"words",
")",
":",
"db",
"=",
"self",
".",
"database",
"# Generate 2 words to start a sentence with",
"seed",
"=",
"random",
".",
"randint",
"(",
"0",
",",
"db",
"[",
"'word_count'",
"]",
"-",
"3",
")",
"seed_word",
",... | Generate a sentence | [
"Generate",
"a",
"sentence"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/makers/text.py#L211-L255 |
27,819 | GetmeUK/MongoFrames | mongoframes/factory/makers/text.py | Markov.init_word_db | def init_word_db(cls, name, text):
"""Initialize a database of words for the maker with the given name"""
# Prep the words
text = text.replace('\n', ' ').replace('\r', ' ')
words = [w.strip() for w in text.split(' ') if w.strip()]
assert len(words) > 2, \
'Database text sources must contain 3 or more words.'
# Build the database
freqs = {}
for i in range(len(words) - 2):
# Create a triplet from the current word
w1 = words[i]
w2 = words[i + 1]
w3 = words[i + 2]
# Add the triplet to the database
key = (w1, w2)
if key in freqs:
freqs[key].append(w3)
else:
freqs[key] = [w3]
# Store the database so it can be used
cls._dbs[name] = {
'freqs': freqs,
'words': words,
'word_count': len(words) - 2
} | python | def init_word_db(cls, name, text):
# Prep the words
text = text.replace('\n', ' ').replace('\r', ' ')
words = [w.strip() for w in text.split(' ') if w.strip()]
assert len(words) > 2, \
'Database text sources must contain 3 or more words.'
# Build the database
freqs = {}
for i in range(len(words) - 2):
# Create a triplet from the current word
w1 = words[i]
w2 = words[i + 1]
w3 = words[i + 2]
# Add the triplet to the database
key = (w1, w2)
if key in freqs:
freqs[key].append(w3)
else:
freqs[key] = [w3]
# Store the database so it can be used
cls._dbs[name] = {
'freqs': freqs,
'words': words,
'word_count': len(words) - 2
} | [
"def",
"init_word_db",
"(",
"cls",
",",
"name",
",",
"text",
")",
":",
"# Prep the words",
"text",
"=",
"text",
".",
"replace",
"(",
"'\\n'",
",",
"' '",
")",
".",
"replace",
"(",
"'\\r'",
",",
"' '",
")",
"words",
"=",
"[",
"w",
".",
"strip",
"(",... | Initialize a database of words for the maker with the given name | [
"Initialize",
"a",
"database",
"of",
"words",
"for",
"the",
"maker",
"with",
"the",
"given",
"name"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/makers/text.py#L258-L288 |
27,820 | GetmeUK/MongoFrames | mongoframes/factory/makers/selections.py | SomeOf.p | def p(i, sample_size, weights):
"""
Given a weighted set and sample size return the probabilty that the
weight `i` will be present in the sample.
Created to test the output of the `SomeOf` maker class. The math was
provided by Andy Blackshaw - thank you dad :)
"""
# Determine the initial pick values
weight_i = weights[i]
weights_sum = sum(weights)
# Build a list of weights that don't contain the weight `i`. This list will
# be used to build the possible picks before weight `i`.
other_weights = list(weights)
del other_weights[i]
# Calculate the probability
probability_of_i = 0
for picks in range(0, sample_size):
# Build the list of possible permutations for this pick in the sample
permutations = list(itertools.permutations(other_weights, picks))
# Calculate the probability for this permutation
permutation_probabilities = []
for permutation in permutations:
# Calculate the probability for each pick in the permutation
pick_probabilities = []
pick_weight_sum = weights_sum
for pick in permutation:
pick_probabilities.append(pick / pick_weight_sum)
# Each time we pick we update the sum of the weight the next
# pick is from.
pick_weight_sum -= pick
# Add the probability of picking i as the last pick
pick_probabilities += [weight_i / pick_weight_sum]
# Multiply all the probabilities for the permutation together
permutation_probability = reduce(
lambda x, y: x * y, pick_probabilities
)
permutation_probabilities.append(permutation_probability)
# Add together all the probabilities for all permutations together
probability_of_i += sum(permutation_probabilities)
return probability_of_i | python | def p(i, sample_size, weights):
# Determine the initial pick values
weight_i = weights[i]
weights_sum = sum(weights)
# Build a list of weights that don't contain the weight `i`. This list will
# be used to build the possible picks before weight `i`.
other_weights = list(weights)
del other_weights[i]
# Calculate the probability
probability_of_i = 0
for picks in range(0, sample_size):
# Build the list of possible permutations for this pick in the sample
permutations = list(itertools.permutations(other_weights, picks))
# Calculate the probability for this permutation
permutation_probabilities = []
for permutation in permutations:
# Calculate the probability for each pick in the permutation
pick_probabilities = []
pick_weight_sum = weights_sum
for pick in permutation:
pick_probabilities.append(pick / pick_weight_sum)
# Each time we pick we update the sum of the weight the next
# pick is from.
pick_weight_sum -= pick
# Add the probability of picking i as the last pick
pick_probabilities += [weight_i / pick_weight_sum]
# Multiply all the probabilities for the permutation together
permutation_probability = reduce(
lambda x, y: x * y, pick_probabilities
)
permutation_probabilities.append(permutation_probability)
# Add together all the probabilities for all permutations together
probability_of_i += sum(permutation_probabilities)
return probability_of_i | [
"def",
"p",
"(",
"i",
",",
"sample_size",
",",
"weights",
")",
":",
"# Determine the initial pick values",
"weight_i",
"=",
"weights",
"[",
"i",
"]",
"weights_sum",
"=",
"sum",
"(",
"weights",
")",
"# Build a list of weights that don't contain the weight `i`. This list ... | Given a weighted set and sample size return the probabilty that the
weight `i` will be present in the sample.
Created to test the output of the `SomeOf` maker class. The math was
provided by Andy Blackshaw - thank you dad :) | [
"Given",
"a",
"weighted",
"set",
"and",
"sample",
"size",
"return",
"the",
"probabilty",
"that",
"the",
"weight",
"i",
"will",
"be",
"present",
"in",
"the",
"sample",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/makers/selections.py#L231-L283 |
27,821 | GetmeUK/MongoFrames | mongoframes/factory/makers/__init__.py | Faker.get_fake | def get_fake(locale=None):
"""Return a shared faker factory used to generate fake data"""
if locale is None:
locale = Faker.default_locale
if not hasattr(Maker, '_fake_' + locale):
Faker._fake = faker.Factory.create(locale)
return Faker._fake | python | def get_fake(locale=None):
if locale is None:
locale = Faker.default_locale
if not hasattr(Maker, '_fake_' + locale):
Faker._fake = faker.Factory.create(locale)
return Faker._fake | [
"def",
"get_fake",
"(",
"locale",
"=",
"None",
")",
":",
"if",
"locale",
"is",
"None",
":",
"locale",
"=",
"Faker",
".",
"default_locale",
"if",
"not",
"hasattr",
"(",
"Maker",
",",
"'_fake_'",
"+",
"locale",
")",
":",
"Faker",
".",
"_fake",
"=",
"fa... | Return a shared faker factory used to generate fake data | [
"Return",
"a",
"shared",
"faker",
"factory",
"used",
"to",
"generate",
"fake",
"data"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/makers/__init__.py#L127-L134 |
27,822 | GetmeUK/MongoFrames | mongoframes/factory/makers/__init__.py | Unique._get_unique | def _get_unique(self, *args):
"""Generate a unique value using the assigned maker"""
# Generate a unique values
value = ''
attempts = 0
while True:
attempts += 1
value = self._maker(*args)
if value not in self._used_values:
break
assert attempts < self._max_attempts, \
'Too many attempts to generate a unique value'
# Add the value to the set of used values
self._used_values.add(value)
return value | python | def _get_unique(self, *args):
# Generate a unique values
value = ''
attempts = 0
while True:
attempts += 1
value = self._maker(*args)
if value not in self._used_values:
break
assert attempts < self._max_attempts, \
'Too many attempts to generate a unique value'
# Add the value to the set of used values
self._used_values.add(value)
return value | [
"def",
"_get_unique",
"(",
"self",
",",
"*",
"args",
")",
":",
"# Generate a unique values",
"value",
"=",
"''",
"attempts",
"=",
"0",
"while",
"True",
":",
"attempts",
"+=",
"1",
"value",
"=",
"self",
".",
"_maker",
"(",
"*",
"args",
")",
"if",
"value... | Generate a unique value using the assigned maker | [
"Generate",
"a",
"unique",
"value",
"using",
"the",
"assigned",
"maker"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/makers/__init__.py#L300-L318 |
27,823 | GetmeUK/MongoFrames | mongoframes/factory/blueprints.py | Blueprint.assemble | def assemble(cls):
"""Assemble a single document using the blueprint"""
document = {}
for field_name, maker in cls._instructions.items():
with maker.target(document):
document[field_name] = maker()
return document | python | def assemble(cls):
document = {}
for field_name, maker in cls._instructions.items():
with maker.target(document):
document[field_name] = maker()
return document | [
"def",
"assemble",
"(",
"cls",
")",
":",
"document",
"=",
"{",
"}",
"for",
"field_name",
",",
"maker",
"in",
"cls",
".",
"_instructions",
".",
"items",
"(",
")",
":",
"with",
"maker",
".",
"target",
"(",
"document",
")",
":",
"document",
"[",
"field_... | Assemble a single document using the blueprint | [
"Assemble",
"a",
"single",
"document",
"using",
"the",
"blueprint"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/blueprints.py#L75-L81 |
27,824 | GetmeUK/MongoFrames | mongoframes/factory/blueprints.py | Blueprint.finish | def finish(cls, document):
"""
Take a assembled document and convert all assembled values to
finished values.
"""
target_document = {}
document_copy = {}
for field_name, value in document.items():
maker = cls._instructions[field_name]
target_document = document.copy()
with maker.target(target_document):
document_copy[field_name] = maker(value)
target_document[field_name] = document_copy[field_name]
return document_copy | python | def finish(cls, document):
target_document = {}
document_copy = {}
for field_name, value in document.items():
maker = cls._instructions[field_name]
target_document = document.copy()
with maker.target(target_document):
document_copy[field_name] = maker(value)
target_document[field_name] = document_copy[field_name]
return document_copy | [
"def",
"finish",
"(",
"cls",
",",
"document",
")",
":",
"target_document",
"=",
"{",
"}",
"document_copy",
"=",
"{",
"}",
"for",
"field_name",
",",
"value",
"in",
"document",
".",
"items",
"(",
")",
":",
"maker",
"=",
"cls",
".",
"_instructions",
"[",
... | Take a assembled document and convert all assembled values to
finished values. | [
"Take",
"a",
"assembled",
"document",
"and",
"convert",
"all",
"assembled",
"values",
"to",
"finished",
"values",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/blueprints.py#L84-L97 |
27,825 | GetmeUK/MongoFrames | mongoframes/factory/blueprints.py | Blueprint.reassemble | def reassemble(cls, fields, document):
"""
Take a previously assembled document and reassemble the given set of
fields for it in place.
"""
for field_name in cls._instructions:
if field_name in fields:
maker = cls._instructions[field_name]
with maker.target(document):
document[field_name] = maker() | python | def reassemble(cls, fields, document):
for field_name in cls._instructions:
if field_name in fields:
maker = cls._instructions[field_name]
with maker.target(document):
document[field_name] = maker() | [
"def",
"reassemble",
"(",
"cls",
",",
"fields",
",",
"document",
")",
":",
"for",
"field_name",
"in",
"cls",
".",
"_instructions",
":",
"if",
"field_name",
"in",
"fields",
":",
"maker",
"=",
"cls",
".",
"_instructions",
"[",
"field_name",
"]",
"with",
"m... | Take a previously assembled document and reassemble the given set of
fields for it in place. | [
"Take",
"a",
"previously",
"assembled",
"document",
"and",
"reassemble",
"the",
"given",
"set",
"of",
"fields",
"for",
"it",
"in",
"place",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/blueprints.py#L100-L109 |
27,826 | GetmeUK/MongoFrames | snippets/comparable.py | ChangeLogEntry.is_diff | def is_diff(self):
"""Return True if there are any differences logged"""
if not isinstance(self.details, dict):
return False
for key in ['additions', 'updates', 'deletions']:
if self.details.get(key, None):
return True
return False | python | def is_diff(self):
if not isinstance(self.details, dict):
return False
for key in ['additions', 'updates', 'deletions']:
if self.details.get(key, None):
return True
return False | [
"def",
"is_diff",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"details",
",",
"dict",
")",
":",
"return",
"False",
"for",
"key",
"in",
"[",
"'additions'",
",",
"'updates'",
",",
"'deletions'",
"]",
":",
"if",
"self",
".",
"det... | Return True if there are any differences logged | [
"Return",
"True",
"if",
"there",
"are",
"any",
"differences",
"logged"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/comparable.py#L73-L82 |
27,827 | GetmeUK/MongoFrames | snippets/comparable.py | ChangeLogEntry.diff_to_html | def diff_to_html(cls, details):
"""Return an entry's details in HTML format"""
changes = []
# Check that there are details to convert to HMTL
if not details:
return ''
def _frame(value):
"""
Handle converted `Frame` references where the human identifier is
stored against the `_str` key.
"""
if isinstance(value, dict) and '_str' in value:
return value['_str']
elif isinstance(value, list):
return ', '.join([_frame(v) for v in value])
return str(value)
# Additions
fields = sorted(details.get('additions', {}))
for field in fields:
new_value = _frame(details['additions'][field])
if isinstance(new_value, list):
new_value = ', '.join([_frame(v) for v in new_value])
change = cls._templates['add'].format(
field=field,
new_value=new_value
)
changes.append(change)
# Updates
fields = sorted(details.get('updates', {}))
for field in fields:
original_value = _frame(details['updates'][field][0])
if isinstance(original_value, list):
original_value = ', '.join([_frame(v) for v in original_value])
new_value = _frame(details['updates'][field][1])
if isinstance(new_value, list):
new_value = ', '.join([_frame(v) for v in new_value])
change = cls._templates['update'].format(
field=field,
original_value=original_value,
new_value=new_value
)
changes.append(change)
# Deletions
fields = sorted(details.get('deletions', {}))
for field in fields:
original_value = _frame(details['deletions'][field])
if isinstance(original_value, list):
original_value = ', '.join([_frame(v) for v in original_value])
change = cls._templates['delete'].format(
field=field,
original_value=original_value
)
changes.append(change)
return '\n'.join(changes) | python | def diff_to_html(cls, details):
changes = []
# Check that there are details to convert to HMTL
if not details:
return ''
def _frame(value):
"""
Handle converted `Frame` references where the human identifier is
stored against the `_str` key.
"""
if isinstance(value, dict) and '_str' in value:
return value['_str']
elif isinstance(value, list):
return ', '.join([_frame(v) for v in value])
return str(value)
# Additions
fields = sorted(details.get('additions', {}))
for field in fields:
new_value = _frame(details['additions'][field])
if isinstance(new_value, list):
new_value = ', '.join([_frame(v) for v in new_value])
change = cls._templates['add'].format(
field=field,
new_value=new_value
)
changes.append(change)
# Updates
fields = sorted(details.get('updates', {}))
for field in fields:
original_value = _frame(details['updates'][field][0])
if isinstance(original_value, list):
original_value = ', '.join([_frame(v) for v in original_value])
new_value = _frame(details['updates'][field][1])
if isinstance(new_value, list):
new_value = ', '.join([_frame(v) for v in new_value])
change = cls._templates['update'].format(
field=field,
original_value=original_value,
new_value=new_value
)
changes.append(change)
# Deletions
fields = sorted(details.get('deletions', {}))
for field in fields:
original_value = _frame(details['deletions'][field])
if isinstance(original_value, list):
original_value = ', '.join([_frame(v) for v in original_value])
change = cls._templates['delete'].format(
field=field,
original_value=original_value
)
changes.append(change)
return '\n'.join(changes) | [
"def",
"diff_to_html",
"(",
"cls",
",",
"details",
")",
":",
"changes",
"=",
"[",
"]",
"# Check that there are details to convert to HMTL",
"if",
"not",
"details",
":",
"return",
"''",
"def",
"_frame",
"(",
"value",
")",
":",
"\"\"\"\n Handle converted `F... | Return an entry's details in HTML format | [
"Return",
"an",
"entry",
"s",
"details",
"in",
"HTML",
"format"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/comparable.py#L151-L214 |
27,828 | GetmeUK/MongoFrames | snippets/comparable.py | ChangeLogEntry.diff_safe | def diff_safe(cls, value):
"""Return a value that can be safely stored as a diff"""
if isinstance(value, Frame):
return {'_str': str(value), '_id': value._id}
elif isinstance(value, (list, tuple)):
return [cls.diff_safe(v) for v in value]
return value | python | def diff_safe(cls, value):
if isinstance(value, Frame):
return {'_str': str(value), '_id': value._id}
elif isinstance(value, (list, tuple)):
return [cls.diff_safe(v) for v in value]
return value | [
"def",
"diff_safe",
"(",
"cls",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"Frame",
")",
":",
"return",
"{",
"'_str'",
":",
"str",
"(",
"value",
")",
",",
"'_id'",
":",
"value",
".",
"_id",
"}",
"elif",
"isinstance",
"(",
"value... | Return a value that can be safely stored as a diff | [
"Return",
"a",
"value",
"that",
"can",
"be",
"safely",
"stored",
"as",
"a",
"diff"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/comparable.py#L217-L223 |
27,829 | GetmeUK/MongoFrames | snippets/comparable.py | ComparableFrame.comparable | def comparable(self):
"""Return a dictionary that can be compared"""
document_dict = self.compare_safe(self._document)
# Remove uncompared fields
self._remove_keys(document_dict, self._uncompared_fields)
# Remove any empty values
clean_document_dict = {}
for k, v in document_dict.items():
if not v and not isinstance(v, (int, float)):
continue
clean_document_dict[k] = v
# Convert any referenced fields to Frames
for ref_field, ref_cls in self._compared_refs.items():
ref = getattr(self, ref_field)
if not ref:
continue
# Check for fields which contain a list of references
if isinstance(ref, list):
if isinstance(ref[0], Frame):
continue
# Dereference the list of reference IDs
setattr(
clean_document_dict,
ref_field,
ref_cls.many(In(Q._id, ref))
)
else:
if isinstance(ref, Frame):
continue
# Dereference the reference ID
setattr(
clean_document_dict,
ref_field,
ref_cls.byId(ref)
)
return clean_document_dict | python | def comparable(self):
document_dict = self.compare_safe(self._document)
# Remove uncompared fields
self._remove_keys(document_dict, self._uncompared_fields)
# Remove any empty values
clean_document_dict = {}
for k, v in document_dict.items():
if not v and not isinstance(v, (int, float)):
continue
clean_document_dict[k] = v
# Convert any referenced fields to Frames
for ref_field, ref_cls in self._compared_refs.items():
ref = getattr(self, ref_field)
if not ref:
continue
# Check for fields which contain a list of references
if isinstance(ref, list):
if isinstance(ref[0], Frame):
continue
# Dereference the list of reference IDs
setattr(
clean_document_dict,
ref_field,
ref_cls.many(In(Q._id, ref))
)
else:
if isinstance(ref, Frame):
continue
# Dereference the reference ID
setattr(
clean_document_dict,
ref_field,
ref_cls.byId(ref)
)
return clean_document_dict | [
"def",
"comparable",
"(",
"self",
")",
":",
"document_dict",
"=",
"self",
".",
"compare_safe",
"(",
"self",
".",
"_document",
")",
"# Remove uncompared fields",
"self",
".",
"_remove_keys",
"(",
"document_dict",
",",
"self",
".",
"_uncompared_fields",
")",
"# Re... | Return a dictionary that can be compared | [
"Return",
"a",
"dictionary",
"that",
"can",
"be",
"compared"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/comparable.py#L265-L308 |
27,830 | GetmeUK/MongoFrames | snippets/comparable.py | ComparableFrame.logged_delete | def logged_delete(self, user):
"""Delete the document and log the event in the change log"""
self.delete()
# Log the change
entry = ChangeLogEntry({
'type': 'DELETED',
'documents': [self],
'user': user
})
entry.insert()
return entry | python | def logged_delete(self, user):
self.delete()
# Log the change
entry = ChangeLogEntry({
'type': 'DELETED',
'documents': [self],
'user': user
})
entry.insert()
return entry | [
"def",
"logged_delete",
"(",
"self",
",",
"user",
")",
":",
"self",
".",
"delete",
"(",
")",
"# Log the change",
"entry",
"=",
"ChangeLogEntry",
"(",
"{",
"'type'",
":",
"'DELETED'",
",",
"'documents'",
":",
"[",
"self",
"]",
",",
"'user'",
":",
"user",
... | Delete the document and log the event in the change log | [
"Delete",
"the",
"document",
"and",
"log",
"the",
"event",
"in",
"the",
"change",
"log"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/comparable.py#L310-L323 |
27,831 | GetmeUK/MongoFrames | snippets/comparable.py | ComparableFrame.logged_insert | def logged_insert(self, user):
"""Create and insert the document and log the event in the change log"""
# Insert the frame's document
self.insert()
# Log the insert
entry = ChangeLogEntry({
'type': 'ADDED',
'documents': [self],
'user': user
})
entry.insert()
return entry | python | def logged_insert(self, user):
# Insert the frame's document
self.insert()
# Log the insert
entry = ChangeLogEntry({
'type': 'ADDED',
'documents': [self],
'user': user
})
entry.insert()
return entry | [
"def",
"logged_insert",
"(",
"self",
",",
"user",
")",
":",
"# Insert the frame's document",
"self",
".",
"insert",
"(",
")",
"# Log the insert",
"entry",
"=",
"ChangeLogEntry",
"(",
"{",
"'type'",
":",
"'ADDED'",
",",
"'documents'",
":",
"[",
"self",
"]",
"... | Create and insert the document and log the event in the change log | [
"Create",
"and",
"insert",
"the",
"document",
"and",
"log",
"the",
"event",
"in",
"the",
"change",
"log"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/comparable.py#L325-L339 |
27,832 | GetmeUK/MongoFrames | snippets/comparable.py | ComparableFrame.logged_update | def logged_update(self, user, data, *fields):
"""
Update the document with the dictionary of data provided and log the
event in the change log.
"""
# Get a copy of the frames comparable data before the update
original = self.comparable
# Update the frame
_fields = fields
if len(fields) == 0:
_fields = data.keys()
for field in _fields:
if field in data:
setattr(self, field, data[field])
self.update(*fields)
# Create an entry and perform a diff
entry = ChangeLogEntry({
'type': 'UPDATED',
'documents': [self],
'user': user
})
entry.add_diff(original, self.comparable)
# Check there's a change to apply/log
if not entry.is_diff:
return
entry.insert()
return entry | python | def logged_update(self, user, data, *fields):
# Get a copy of the frames comparable data before the update
original = self.comparable
# Update the frame
_fields = fields
if len(fields) == 0:
_fields = data.keys()
for field in _fields:
if field in data:
setattr(self, field, data[field])
self.update(*fields)
# Create an entry and perform a diff
entry = ChangeLogEntry({
'type': 'UPDATED',
'documents': [self],
'user': user
})
entry.add_diff(original, self.comparable)
# Check there's a change to apply/log
if not entry.is_diff:
return
entry.insert()
return entry | [
"def",
"logged_update",
"(",
"self",
",",
"user",
",",
"data",
",",
"*",
"fields",
")",
":",
"# Get a copy of the frames comparable data before the update",
"original",
"=",
"self",
".",
"comparable",
"# Update the frame",
"_fields",
"=",
"fields",
"if",
"len",
"(",... | Update the document with the dictionary of data provided and log the
event in the change log. | [
"Update",
"the",
"document",
"with",
"the",
"dictionary",
"of",
"data",
"provided",
"and",
"log",
"the",
"event",
"in",
"the",
"change",
"log",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/comparable.py#L341-L374 |
27,833 | GetmeUK/MongoFrames | snippets/comparable.py | ComparableFrame.compare_safe | def compare_safe(cls, value):
"""Return a value that can be safely compared"""
# Date
if type(value) == date:
return str(value)
# Lists
elif isinstance(value, (list, tuple)):
return [cls.compare_safe(v) for v in value]
# Dictionaries
elif isinstance(value, dict):
return {k: cls.compare_safe(v) for k, v in value.items()}
return value | python | def compare_safe(cls, value):
# Date
if type(value) == date:
return str(value)
# Lists
elif isinstance(value, (list, tuple)):
return [cls.compare_safe(v) for v in value]
# Dictionaries
elif isinstance(value, dict):
return {k: cls.compare_safe(v) for k, v in value.items()}
return value | [
"def",
"compare_safe",
"(",
"cls",
",",
"value",
")",
":",
"# Date",
"if",
"type",
"(",
"value",
")",
"==",
"date",
":",
"return",
"str",
"(",
"value",
")",
"# Lists",
"elif",
"isinstance",
"(",
"value",
",",
"(",
"list",
",",
"tuple",
")",
")",
":... | Return a value that can be safely compared | [
"Return",
"a",
"value",
"that",
"can",
"be",
"safely",
"compared"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/comparable.py#L377-L392 |
27,834 | GetmeUK/MongoFrames | mongoframes/queries.py | ElemMatch | def ElemMatch(q, *conditions):
"""
The ElemMatch operator matches documents that contain an array field with at
least one element that matches all the specified query criteria.
"""
new_condition = {}
for condition in conditions:
deep_merge(condition.to_dict(), new_condition)
return Condition(q._path, new_condition, '$elemMatch') | python | def ElemMatch(q, *conditions):
new_condition = {}
for condition in conditions:
deep_merge(condition.to_dict(), new_condition)
return Condition(q._path, new_condition, '$elemMatch') | [
"def",
"ElemMatch",
"(",
"q",
",",
"*",
"conditions",
")",
":",
"new_condition",
"=",
"{",
"}",
"for",
"condition",
"in",
"conditions",
":",
"deep_merge",
"(",
"condition",
".",
"to_dict",
"(",
")",
",",
"new_condition",
")",
"return",
"Condition",
"(",
... | The ElemMatch operator matches documents that contain an array field with at
least one element that matches all the specified query criteria. | [
"The",
"ElemMatch",
"operator",
"matches",
"documents",
"that",
"contain",
"an",
"array",
"field",
"with",
"at",
"least",
"one",
"element",
"that",
"matches",
"all",
"the",
"specified",
"query",
"criteria",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/queries.py#L135-L144 |
27,835 | GetmeUK/MongoFrames | mongoframes/queries.py | SortBy | def SortBy(*qs):
"""Convert a list of Q objects into list of sort instructions"""
sort = []
for q in qs:
if q._path.endswith('.desc'):
sort.append((q._path[:-5], DESCENDING))
else:
sort.append((q._path, ASCENDING))
return sort | python | def SortBy(*qs):
sort = []
for q in qs:
if q._path.endswith('.desc'):
sort.append((q._path[:-5], DESCENDING))
else:
sort.append((q._path, ASCENDING))
return sort | [
"def",
"SortBy",
"(",
"*",
"qs",
")",
":",
"sort",
"=",
"[",
"]",
"for",
"q",
"in",
"qs",
":",
"if",
"q",
".",
"_path",
".",
"endswith",
"(",
"'.desc'",
")",
":",
"sort",
".",
"append",
"(",
"(",
"q",
".",
"_path",
"[",
":",
"-",
"5",
"]",
... | Convert a list of Q objects into list of sort instructions | [
"Convert",
"a",
"list",
"of",
"Q",
"objects",
"into",
"list",
"of",
"sort",
"instructions"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/queries.py#L249-L258 |
27,836 | GetmeUK/MongoFrames | mongoframes/queries.py | deep_merge | def deep_merge(source, dest):
"""
Deep merges source dict into dest dict.
This code was taken directly from the mongothon project:
https://github.com/gamechanger/mongothon/tree/master/mongothon
"""
for key, value in source.items():
if key in dest:
if isinstance(value, dict) and isinstance(dest[key], dict):
deep_merge(value, dest[key])
continue
elif isinstance(value, list) and isinstance(dest[key], list):
for item in value:
if item not in dest[key]:
dest[key].append(item)
continue
dest[key] = value | python | def deep_merge(source, dest):
for key, value in source.items():
if key in dest:
if isinstance(value, dict) and isinstance(dest[key], dict):
deep_merge(value, dest[key])
continue
elif isinstance(value, list) and isinstance(dest[key], list):
for item in value:
if item not in dest[key]:
dest[key].append(item)
continue
dest[key] = value | [
"def",
"deep_merge",
"(",
"source",
",",
"dest",
")",
":",
"for",
"key",
",",
"value",
"in",
"source",
".",
"items",
"(",
")",
":",
"if",
"key",
"in",
"dest",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
"and",
"isinstance",
"(",
"dest"... | Deep merges source dict into dest dict.
This code was taken directly from the mongothon project:
https://github.com/gamechanger/mongothon/tree/master/mongothon | [
"Deep",
"merges",
"source",
"dict",
"into",
"dest",
"dict",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/queries.py#L263-L280 |
27,837 | GetmeUK/MongoFrames | mongoframes/queries.py | to_refs | def to_refs(value):
"""Convert all Frame instances within the given value to Ids"""
from mongoframes.frames import Frame, SubFrame
# Frame
if isinstance(value, Frame):
return value._id
# SubFrame
elif isinstance(value, SubFrame):
return to_refs(value._document)
# Lists
elif isinstance(value, (list, tuple)):
return [to_refs(v) for v in value]
# Dictionaries
elif isinstance(value, dict):
return {k: to_refs(v) for k, v in value.items()}
return value | python | def to_refs(value):
from mongoframes.frames import Frame, SubFrame
# Frame
if isinstance(value, Frame):
return value._id
# SubFrame
elif isinstance(value, SubFrame):
return to_refs(value._document)
# Lists
elif isinstance(value, (list, tuple)):
return [to_refs(v) for v in value]
# Dictionaries
elif isinstance(value, dict):
return {k: to_refs(v) for k, v in value.items()}
return value | [
"def",
"to_refs",
"(",
"value",
")",
":",
"from",
"mongoframes",
".",
"frames",
"import",
"Frame",
",",
"SubFrame",
"# Frame",
"if",
"isinstance",
"(",
"value",
",",
"Frame",
")",
":",
"return",
"value",
".",
"_id",
"# SubFrame",
"elif",
"isinstance",
"(",... | Convert all Frame instances within the given value to Ids | [
"Convert",
"all",
"Frame",
"instances",
"within",
"the",
"given",
"value",
"to",
"Ids"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/queries.py#L282-L302 |
27,838 | GetmeUK/MongoFrames | mongoframes/factory/__init__.py | Factory.assemble | def assemble(self, blueprint, quota):
"""Assemble a quota of documents"""
# Reset the blueprint
blueprint.reset()
# Assemble the documents
documents = []
for i in range(0, int(quota)):
documents.append(blueprint.assemble())
return documents | python | def assemble(self, blueprint, quota):
# Reset the blueprint
blueprint.reset()
# Assemble the documents
documents = []
for i in range(0, int(quota)):
documents.append(blueprint.assemble())
return documents | [
"def",
"assemble",
"(",
"self",
",",
"blueprint",
",",
"quota",
")",
":",
"# Reset the blueprint",
"blueprint",
".",
"reset",
"(",
")",
"# Assemble the documents",
"documents",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"int",
"(",
"quota",
... | Assemble a quota of documents | [
"Assemble",
"a",
"quota",
"of",
"documents"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/__init__.py#L39-L50 |
27,839 | GetmeUK/MongoFrames | mongoframes/factory/__init__.py | Factory.finish | def finish(self, blueprint, documents):
"""Finish a list of pre-assembled documents"""
# Reset the blueprint
blueprint.reset()
# Finish the documents
finished = []
for document in documents:
finished.append(blueprint.finish(document))
return finished | python | def finish(self, blueprint, documents):
# Reset the blueprint
blueprint.reset()
# Finish the documents
finished = []
for document in documents:
finished.append(blueprint.finish(document))
return finished | [
"def",
"finish",
"(",
"self",
",",
"blueprint",
",",
"documents",
")",
":",
"# Reset the blueprint",
"blueprint",
".",
"reset",
"(",
")",
"# Finish the documents",
"finished",
"=",
"[",
"]",
"for",
"document",
"in",
"documents",
":",
"finished",
".",
"append",... | Finish a list of pre-assembled documents | [
"Finish",
"a",
"list",
"of",
"pre",
"-",
"assembled",
"documents"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/__init__.py#L52-L63 |
27,840 | GetmeUK/MongoFrames | mongoframes/factory/__init__.py | Factory.populate | def populate(self, blueprint, documents):
"""Populate the database with documents"""
# Finish the documents
documents = self.finish(blueprint, documents)
# Convert the documents to frame instances
frames = []
for document in documents:
# Separate out any meta fields
meta_document = {}
for field_name in blueprint._meta_fields:
meta_document[field_name] = document[field_name]
document.pop(field_name)
# Initialize the frame
frame = blueprint.get_frame_cls()(document)
# Apply any meta fields
for key, value in meta_document.items():
setattr(frame, key, value)
frames.append(frame)
# Insert the documents
blueprint.on_fake(frames)
frames = blueprint.get_frame_cls().insert_many(frames)
blueprint.on_faked(frames)
return frames | python | def populate(self, blueprint, documents):
# Finish the documents
documents = self.finish(blueprint, documents)
# Convert the documents to frame instances
frames = []
for document in documents:
# Separate out any meta fields
meta_document = {}
for field_name in blueprint._meta_fields:
meta_document[field_name] = document[field_name]
document.pop(field_name)
# Initialize the frame
frame = blueprint.get_frame_cls()(document)
# Apply any meta fields
for key, value in meta_document.items():
setattr(frame, key, value)
frames.append(frame)
# Insert the documents
blueprint.on_fake(frames)
frames = blueprint.get_frame_cls().insert_many(frames)
blueprint.on_faked(frames)
return frames | [
"def",
"populate",
"(",
"self",
",",
"blueprint",
",",
"documents",
")",
":",
"# Finish the documents",
"documents",
"=",
"self",
".",
"finish",
"(",
"blueprint",
",",
"documents",
")",
"# Convert the documents to frame instances",
"frames",
"=",
"[",
"]",
"for",
... | Populate the database with documents | [
"Populate",
"the",
"database",
"with",
"documents"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/__init__.py#L65-L94 |
27,841 | GetmeUK/MongoFrames | mongoframes/factory/__init__.py | Factory.reassemble | def reassemble(self, blueprint, fields, documents):
"""
Reassemble the given set of fields for a list of pre-assembed documents.
NOTE: Reassembly is done in place, since the data you send the method
should be JSON type safe, if you need to retain the existing document
it is recommended that you copy them using `copy.deepcopy`.
"""
# Reset the blueprint
blueprint.reset()
# Reassemble the documents
for document in documents:
blueprint.reassemble(fields, document) | python | def reassemble(self, blueprint, fields, documents):
# Reset the blueprint
blueprint.reset()
# Reassemble the documents
for document in documents:
blueprint.reassemble(fields, document) | [
"def",
"reassemble",
"(",
"self",
",",
"blueprint",
",",
"fields",
",",
"documents",
")",
":",
"# Reset the blueprint",
"blueprint",
".",
"reset",
"(",
")",
"# Reassemble the documents",
"for",
"document",
"in",
"documents",
":",
"blueprint",
".",
"reassemble",
... | Reassemble the given set of fields for a list of pre-assembed documents.
NOTE: Reassembly is done in place, since the data you send the method
should be JSON type safe, if you need to retain the existing document
it is recommended that you copy them using `copy.deepcopy`. | [
"Reassemble",
"the",
"given",
"set",
"of",
"fields",
"for",
"a",
"list",
"of",
"pre",
"-",
"assembed",
"documents",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/mongoframes/factory/__init__.py#L96-L110 |
27,842 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.can_publish | def can_publish(self):
"""
Return True if there is a draft version of the document that's ready to
be published.
"""
with self.published_context():
published = self.one(
Q._uid == self._uid,
projection={'revision': True}
)
if not published:
return True
with self.draft_context():
draft = self.one(Q._uid == self._uid, projection={'revision': True})
return draft.revision > published.revision | python | def can_publish(self):
with self.published_context():
published = self.one(
Q._uid == self._uid,
projection={'revision': True}
)
if not published:
return True
with self.draft_context():
draft = self.one(Q._uid == self._uid, projection={'revision': True})
return draft.revision > published.revision | [
"def",
"can_publish",
"(",
"self",
")",
":",
"with",
"self",
".",
"published_context",
"(",
")",
":",
"published",
"=",
"self",
".",
"one",
"(",
"Q",
".",
"_uid",
"==",
"self",
".",
"_uid",
",",
"projection",
"=",
"{",
"'revision'",
":",
"True",
"}",... | Return True if there is a draft version of the document that's ready to
be published. | [
"Return",
"True",
"if",
"there",
"is",
"a",
"draft",
"version",
"of",
"the",
"document",
"that",
"s",
"ready",
"to",
"be",
"published",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L29-L46 |
27,843 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.can_revert | def can_revert(self):
"""
Return True if we can revert the draft version of the document to the
currently published version.
"""
if self.can_publish:
with self.published_context():
return self.count(Q._uid == self._uid) > 0
return False | python | def can_revert(self):
if self.can_publish:
with self.published_context():
return self.count(Q._uid == self._uid) > 0
return False | [
"def",
"can_revert",
"(",
"self",
")",
":",
"if",
"self",
".",
"can_publish",
":",
"with",
"self",
".",
"published_context",
"(",
")",
":",
"return",
"self",
".",
"count",
"(",
"Q",
".",
"_uid",
"==",
"self",
".",
"_uid",
")",
">",
"0",
"return",
"... | Return True if we can revert the draft version of the document to the
currently published version. | [
"Return",
"True",
"if",
"we",
"can",
"revert",
"the",
"draft",
"version",
"of",
"the",
"document",
"to",
"the",
"currently",
"published",
"version",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L49-L59 |
27,844 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.get_publisher_doc | def get_publisher_doc(self):
"""Return a publish safe version of the frame's document"""
with self.draft_context():
# Select the draft document from the database
draft = self.one(Q._uid == self._uid)
publisher_doc = draft._document
# Remove any keys from the document that should not be transferred
# when publishing.
self._remove_keys(publisher_doc, self._unpublished_fields)
return publisher_doc | python | def get_publisher_doc(self):
with self.draft_context():
# Select the draft document from the database
draft = self.one(Q._uid == self._uid)
publisher_doc = draft._document
# Remove any keys from the document that should not be transferred
# when publishing.
self._remove_keys(publisher_doc, self._unpublished_fields)
return publisher_doc | [
"def",
"get_publisher_doc",
"(",
"self",
")",
":",
"with",
"self",
".",
"draft_context",
"(",
")",
":",
"# Select the draft document from the database",
"draft",
"=",
"self",
".",
"one",
"(",
"Q",
".",
"_uid",
"==",
"self",
".",
"_uid",
")",
"publisher_doc",
... | Return a publish safe version of the frame's document | [
"Return",
"a",
"publish",
"safe",
"version",
"of",
"the",
"frame",
"s",
"document"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L61-L72 |
27,845 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.publish | def publish(self):
"""
Publish the current document.
NOTE: You must have saved any changes to the draft version of the
document before publishing, unsaved changes wont be published.
"""
publisher_doc = self.get_publisher_doc()
with self.published_context():
# Select the published document
published = self.one(Q._uid == self._uid)
# If there's no published version of the document create one
if not published:
published = self.__class__()
# Update the document
for field, value in publisher_doc.items():
setattr(published, field, value)
# Save published version
published.upsert()
# Set the revisions number for draft/published version, we use PyMongo
# directly as it's more convienent to use the shared `_uid`.
now = datetime.now()
with self.draft_context():
self.get_collection().update(
{'_uid': self._uid},
{'$set': {'revision': now}}
)
with self.published_context():
self.get_collection().update(
{'_uid': self._uid},
{'$set': {'revision': now}}
) | python | def publish(self):
publisher_doc = self.get_publisher_doc()
with self.published_context():
# Select the published document
published = self.one(Q._uid == self._uid)
# If there's no published version of the document create one
if not published:
published = self.__class__()
# Update the document
for field, value in publisher_doc.items():
setattr(published, field, value)
# Save published version
published.upsert()
# Set the revisions number for draft/published version, we use PyMongo
# directly as it's more convienent to use the shared `_uid`.
now = datetime.now()
with self.draft_context():
self.get_collection().update(
{'_uid': self._uid},
{'$set': {'revision': now}}
)
with self.published_context():
self.get_collection().update(
{'_uid': self._uid},
{'$set': {'revision': now}}
) | [
"def",
"publish",
"(",
"self",
")",
":",
"publisher_doc",
"=",
"self",
".",
"get_publisher_doc",
"(",
")",
"with",
"self",
".",
"published_context",
"(",
")",
":",
"# Select the published document",
"published",
"=",
"self",
".",
"one",
"(",
"Q",
".",
"_uid"... | Publish the current document.
NOTE: You must have saved any changes to the draft version of the
document before publishing, unsaved changes wont be published. | [
"Publish",
"the",
"current",
"document",
"."
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L74-L112 |
27,846 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.new_revision | def new_revision(self, *fields):
"""Save a new revision of the document"""
# Ensure this document is a draft
if not self._id:
assert g.get('draft'), \
'Only draft documents can be assigned new revisions'
else:
with self.draft_context():
assert self.count(Q._id == self._id) == 1, \
'Only draft documents can be assigned new revisions'
# Set the revision
if len(fields) > 0:
fields.append('revision')
self.revision = datetime.now()
# Update the document
self.upsert(*fields) | python | def new_revision(self, *fields):
# Ensure this document is a draft
if not self._id:
assert g.get('draft'), \
'Only draft documents can be assigned new revisions'
else:
with self.draft_context():
assert self.count(Q._id == self._id) == 1, \
'Only draft documents can be assigned new revisions'
# Set the revision
if len(fields) > 0:
fields.append('revision')
self.revision = datetime.now()
# Update the document
self.upsert(*fields) | [
"def",
"new_revision",
"(",
"self",
",",
"*",
"fields",
")",
":",
"# Ensure this document is a draft",
"if",
"not",
"self",
".",
"_id",
":",
"assert",
"g",
".",
"get",
"(",
"'draft'",
")",
",",
"'Only draft documents can be assigned new revisions'",
"else",
":",
... | Save a new revision of the document | [
"Save",
"a",
"new",
"revision",
"of",
"the",
"document"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L114-L133 |
27,847 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.delete | def delete(self):
"""Delete this document and any counterpart document"""
with self.draft_context():
draft = self.one(Q._uid == self._uid)
if draft:
super(PublisherFrame, draft).delete()
with self.published_context():
published = self.one(Q._uid == self._uid)
if published:
super(PublisherFrame, published).delete() | python | def delete(self):
with self.draft_context():
draft = self.one(Q._uid == self._uid)
if draft:
super(PublisherFrame, draft).delete()
with self.published_context():
published = self.one(Q._uid == self._uid)
if published:
super(PublisherFrame, published).delete() | [
"def",
"delete",
"(",
"self",
")",
":",
"with",
"self",
".",
"draft_context",
"(",
")",
":",
"draft",
"=",
"self",
".",
"one",
"(",
"Q",
".",
"_uid",
"==",
"self",
".",
"_uid",
")",
"if",
"draft",
":",
"super",
"(",
"PublisherFrame",
",",
"draft",
... | Delete this document and any counterpart document | [
"Delete",
"this",
"document",
"and",
"any",
"counterpart",
"document"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L135-L146 |
27,848 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.revert | def revert(self):
"""Revert the document to currently published version"""
with self.draft_context():
draft = self.one(Q._uid == self._uid)
with self.published_context():
published = self.one(Q._uid == self._uid)
for field, value in draft._document.items():
if field in self._unpublished_fields:
continue
setattr(draft, field, getattr(published, field))
# Revert the revision
draft.revision = published.revision
draft.update() | python | def revert(self):
with self.draft_context():
draft = self.one(Q._uid == self._uid)
with self.published_context():
published = self.one(Q._uid == self._uid)
for field, value in draft._document.items():
if field in self._unpublished_fields:
continue
setattr(draft, field, getattr(published, field))
# Revert the revision
draft.revision = published.revision
draft.update() | [
"def",
"revert",
"(",
"self",
")",
":",
"with",
"self",
".",
"draft_context",
"(",
")",
":",
"draft",
"=",
"self",
".",
"one",
"(",
"Q",
".",
"_uid",
"==",
"self",
".",
"_uid",
")",
"with",
"self",
".",
"published_context",
"(",
")",
":",
"publishe... | Revert the document to currently published version | [
"Revert",
"the",
"document",
"to",
"currently",
"published",
"version"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L148-L165 |
27,849 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.get_collection | def get_collection(cls):
"""Return a reference to the database collection for the class"""
# By default the collection returned will be the published collection,
# however if the `draft` flag has been set against the global context
# (e.g `g`) then the collection returned will contain draft documents.
if g.get('draft'):
return getattr(
cls.get_db(),
'{collection}_draft'.format(collection=cls._collection)
)
return getattr(cls.get_db(), cls._collection) | python | def get_collection(cls):
# By default the collection returned will be the published collection,
# however if the `draft` flag has been set against the global context
# (e.g `g`) then the collection returned will contain draft documents.
if g.get('draft'):
return getattr(
cls.get_db(),
'{collection}_draft'.format(collection=cls._collection)
)
return getattr(cls.get_db(), cls._collection) | [
"def",
"get_collection",
"(",
"cls",
")",
":",
"# By default the collection returned will be the published collection,",
"# however if the `draft` flag has been set against the global context",
"# (e.g `g`) then the collection returned will contain draft documents.",
"if",
"g",
".",
"get",
... | Return a reference to the database collection for the class | [
"Return",
"a",
"reference",
"to",
"the",
"database",
"collection",
"for",
"the",
"class"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L168-L181 |
27,850 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.draft_context | def draft_context(cls):
"""Set the context to draft"""
previous_state = g.get('draft')
try:
g.draft = True
yield
finally:
g.draft = previous_state | python | def draft_context(cls):
previous_state = g.get('draft')
try:
g.draft = True
yield
finally:
g.draft = previous_state | [
"def",
"draft_context",
"(",
"cls",
")",
":",
"previous_state",
"=",
"g",
".",
"get",
"(",
"'draft'",
")",
"try",
":",
"g",
".",
"draft",
"=",
"True",
"yield",
"finally",
":",
"g",
".",
"draft",
"=",
"previous_state"
] | Set the context to draft | [
"Set",
"the",
"context",
"to",
"draft"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L187-L194 |
27,851 | GetmeUK/MongoFrames | snippets/publishing.py | PublisherFrame.published_context | def published_context(cls):
"""Set the context to published"""
previous_state = g.get('draft')
try:
g.draft = False
yield
finally:
g.draft = previous_state | python | def published_context(cls):
previous_state = g.get('draft')
try:
g.draft = False
yield
finally:
g.draft = previous_state | [
"def",
"published_context",
"(",
"cls",
")",
":",
"previous_state",
"=",
"g",
".",
"get",
"(",
"'draft'",
")",
"try",
":",
"g",
".",
"draft",
"=",
"False",
"yield",
"finally",
":",
"g",
".",
"draft",
"=",
"previous_state"
] | Set the context to published | [
"Set",
"the",
"context",
"to",
"published"
] | 7d2bd792235dfa77a9deecab5366f5f73480823d | https://github.com/GetmeUK/MongoFrames/blob/7d2bd792235dfa77a9deecab5366f5f73480823d/snippets/publishing.py#L198-L205 |
27,852 | src-d/modelforge | modelforge/registry.py | initialize_registry | def initialize_registry(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
"""
Initialize the registry and the index.
:param args: :class:`argparse.Namespace` with "backend", "args", "force" and "log_level".
:param backend: Backend which is responsible for working with model files.
:param log: Logger supplied by supply_backend
:return: None
"""
try:
backend.reset(args.force)
except ExistingBackendError:
return 1
log.info("Resetting the index ...")
backend.index.reset()
try:
backend.index.upload("reset", {})
except ValueError:
return 1
log.info("Successfully initialized") | python | def initialize_registry(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
try:
backend.reset(args.force)
except ExistingBackendError:
return 1
log.info("Resetting the index ...")
backend.index.reset()
try:
backend.index.upload("reset", {})
except ValueError:
return 1
log.info("Successfully initialized") | [
"def",
"initialize_registry",
"(",
"args",
":",
"argparse",
".",
"Namespace",
",",
"backend",
":",
"StorageBackend",
",",
"log",
":",
"logging",
".",
"Logger",
")",
":",
"try",
":",
"backend",
".",
"reset",
"(",
"args",
".",
"force",
")",
"except",
"Exis... | Initialize the registry and the index.
:param args: :class:`argparse.Namespace` with "backend", "args", "force" and "log_level".
:param backend: Backend which is responsible for working with model files.
:param log: Logger supplied by supply_backend
:return: None | [
"Initialize",
"the",
"registry",
"and",
"the",
"index",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/registry.py#L17-L37 |
27,853 | src-d/modelforge | modelforge/registry.py | publish_model | def publish_model(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
"""
Push the model to Google Cloud Storage and updates the index file.
:param args: :class:`argparse.Namespace` with "model", "backend", "args", "force", "meta" \
"update_default", "username", "password", "remote_repo", "template_model", \
"template_readme" and "log_level".
:param backend: Backend which is responsible for working with model files.
:param log: Logger supplied by supply_backend
:return: None if successful, 1 otherwise.
"""
path = os.path.abspath(args.model)
try:
model = GenericModel(source=path, dummy=True)
except ValueError as e:
log.critical('"model" must be a path: %s', e)
return 1
except Exception as e:
log.critical("Failed to load the model: %s: %s" % (type(e).__name__, e))
return 1
base_meta = model.meta
try:
model_url = backend.upload_model(path, base_meta, args.force)
except ModelAlreadyExistsError:
return 1
log.info("Uploaded as %s", model_url)
with open(os.path.join(args.meta), encoding="utf-8") as _in:
extra_meta = json.load(_in)
model_type, model_uuid = base_meta["model"], base_meta["uuid"]
meta = extract_model_meta(base_meta, extra_meta, model_url)
log.info("Updating the models index...")
try:
template_model = backend.index.load_template(args.template_model)
template_readme = backend.index.load_template(args.template_readme)
except ValueError:
return 1
backend.index.add_model(model_type, model_uuid, meta, template_model, args.update_default)
backend.index.update_readme(template_readme)
try:
backend.index.upload("add", {"model": model_type, "uuid": model_uuid})
except ValueError: # TODO: replace with PorcelainError, see related TODO in index.py:181
return 1
log.info("Successfully published.") | python | def publish_model(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
path = os.path.abspath(args.model)
try:
model = GenericModel(source=path, dummy=True)
except ValueError as e:
log.critical('"model" must be a path: %s', e)
return 1
except Exception as e:
log.critical("Failed to load the model: %s: %s" % (type(e).__name__, e))
return 1
base_meta = model.meta
try:
model_url = backend.upload_model(path, base_meta, args.force)
except ModelAlreadyExistsError:
return 1
log.info("Uploaded as %s", model_url)
with open(os.path.join(args.meta), encoding="utf-8") as _in:
extra_meta = json.load(_in)
model_type, model_uuid = base_meta["model"], base_meta["uuid"]
meta = extract_model_meta(base_meta, extra_meta, model_url)
log.info("Updating the models index...")
try:
template_model = backend.index.load_template(args.template_model)
template_readme = backend.index.load_template(args.template_readme)
except ValueError:
return 1
backend.index.add_model(model_type, model_uuid, meta, template_model, args.update_default)
backend.index.update_readme(template_readme)
try:
backend.index.upload("add", {"model": model_type, "uuid": model_uuid})
except ValueError: # TODO: replace with PorcelainError, see related TODO in index.py:181
return 1
log.info("Successfully published.") | [
"def",
"publish_model",
"(",
"args",
":",
"argparse",
".",
"Namespace",
",",
"backend",
":",
"StorageBackend",
",",
"log",
":",
"logging",
".",
"Logger",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"args",
".",
"model",
")",
"try",
... | Push the model to Google Cloud Storage and updates the index file.
:param args: :class:`argparse.Namespace` with "model", "backend", "args", "force", "meta" \
"update_default", "username", "password", "remote_repo", "template_model", \
"template_readme" and "log_level".
:param backend: Backend which is responsible for working with model files.
:param log: Logger supplied by supply_backend
:return: None if successful, 1 otherwise. | [
"Push",
"the",
"model",
"to",
"Google",
"Cloud",
"Storage",
"and",
"updates",
"the",
"index",
"file",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/registry.py#L41-L84 |
27,854 | src-d/modelforge | modelforge/registry.py | list_models | def list_models(args: argparse.Namespace):
"""
Output the list of known models in the registry.
:param args: :class:`argparse.Namespace` with "username", "password", "remote_repo" and \
"log_level"
:return: None
"""
try:
git_index = GitIndex(remote=args.index_repo, username=args.username,
password=args.password, cache=args.cache, log_level=args.log_level)
except ValueError:
return 1
for model_type, models in git_index.models.items():
print(model_type)
default = git_index.meta[model_type]["default"]
for uuid, model in sorted(models.items(),
key=lambda m: parse_datetime(m[1]["created_at"]),
reverse=True):
print(" %s %s" % ("*" if default == uuid else " ", uuid),
model["created_at"]) | python | def list_models(args: argparse.Namespace):
try:
git_index = GitIndex(remote=args.index_repo, username=args.username,
password=args.password, cache=args.cache, log_level=args.log_level)
except ValueError:
return 1
for model_type, models in git_index.models.items():
print(model_type)
default = git_index.meta[model_type]["default"]
for uuid, model in sorted(models.items(),
key=lambda m: parse_datetime(m[1]["created_at"]),
reverse=True):
print(" %s %s" % ("*" if default == uuid else " ", uuid),
model["created_at"]) | [
"def",
"list_models",
"(",
"args",
":",
"argparse",
".",
"Namespace",
")",
":",
"try",
":",
"git_index",
"=",
"GitIndex",
"(",
"remote",
"=",
"args",
".",
"index_repo",
",",
"username",
"=",
"args",
".",
"username",
",",
"password",
"=",
"args",
".",
"... | Output the list of known models in the registry.
:param args: :class:`argparse.Namespace` with "username", "password", "remote_repo" and \
"log_level"
:return: None | [
"Output",
"the",
"list",
"of",
"known",
"models",
"in",
"the",
"registry",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/registry.py#L87-L107 |
27,855 | src-d/modelforge | modelforge/tools.py | install_environment | def install_environment(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
"""
Install the packages mentioned in the model's metadata.
:param args: :param args: :class:`argparse.Namespace` with "input", "reproduce", "backend", \
"args", "username", "password", "remote_repo" and "log_level".
:param backend: Backend which is responsible for working with model files.
:param log: Logger supplied by supply_backend
:return: None
"""
model = _load_generic_model(args.input, backend, log)
if model is None:
return 1
packages = ["%s==%s" % (pkg, ver) for pkg, ver in model.environment["packages"]]
cmdline = [sys.executable, "-m", "pip", "install"] + args.pip + packages
log.info(" ".join(cmdline))
subprocess.check_call(cmdline)
if args.reproduce:
for dataset in model.datasets:
download_http(dataset[0], dataset[1], log) | python | def install_environment(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
model = _load_generic_model(args.input, backend, log)
if model is None:
return 1
packages = ["%s==%s" % (pkg, ver) for pkg, ver in model.environment["packages"]]
cmdline = [sys.executable, "-m", "pip", "install"] + args.pip + packages
log.info(" ".join(cmdline))
subprocess.check_call(cmdline)
if args.reproduce:
for dataset in model.datasets:
download_http(dataset[0], dataset[1], log) | [
"def",
"install_environment",
"(",
"args",
":",
"argparse",
".",
"Namespace",
",",
"backend",
":",
"StorageBackend",
",",
"log",
":",
"logging",
".",
"Logger",
")",
":",
"model",
"=",
"_load_generic_model",
"(",
"args",
".",
"input",
",",
"backend",
",",
"... | Install the packages mentioned in the model's metadata.
:param args: :param args: :class:`argparse.Namespace` with "input", "reproduce", "backend", \
"args", "username", "password", "remote_repo" and "log_level".
:param backend: Backend which is responsible for working with model files.
:param log: Logger supplied by supply_backend
:return: None | [
"Install",
"the",
"packages",
"mentioned",
"in",
"the",
"model",
"s",
"metadata",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/tools.py#L13-L32 |
27,856 | src-d/modelforge | modelforge/tools.py | dump_model | def dump_model(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
"""
Print the information about the model.
:param args: :class:`argparse.Namespace` with "input", "backend", "args", "username", \
"password", "remote_repo" and "log_level".
:param backend: Backend which is responsible for working with model files.
:param log: Logger supplied by supply_backend
:return: None
"""
model = _load_generic_model(args.input, backend, log)
if model is None:
return 1
print(model) | python | def dump_model(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
model = _load_generic_model(args.input, backend, log)
if model is None:
return 1
print(model) | [
"def",
"dump_model",
"(",
"args",
":",
"argparse",
".",
"Namespace",
",",
"backend",
":",
"StorageBackend",
",",
"log",
":",
"logging",
".",
"Logger",
")",
":",
"model",
"=",
"_load_generic_model",
"(",
"args",
".",
"input",
",",
"backend",
",",
"log",
"... | Print the information about the model.
:param args: :class:`argparse.Namespace` with "input", "backend", "args", "username", \
"password", "remote_repo" and "log_level".
:param backend: Backend which is responsible for working with model files.
:param log: Logger supplied by supply_backend
:return: None | [
"Print",
"the",
"information",
"about",
"the",
"model",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/tools.py#L48-L61 |
27,857 | src-d/modelforge | modelforge/backends.py | register_backend | def register_backend(cls: Type[StorageBackend]):
"""Decorator to register another StorageBackend using it's `NAME`."""
if not issubclass(cls, StorageBackend):
raise TypeError("cls must be a subclass of StorageBackend")
__registry__[cls.NAME] = cls
return cls | python | def register_backend(cls: Type[StorageBackend]):
if not issubclass(cls, StorageBackend):
raise TypeError("cls must be a subclass of StorageBackend")
__registry__[cls.NAME] = cls
return cls | [
"def",
"register_backend",
"(",
"cls",
":",
"Type",
"[",
"StorageBackend",
"]",
")",
":",
"if",
"not",
"issubclass",
"(",
"cls",
",",
"StorageBackend",
")",
":",
"raise",
"TypeError",
"(",
"\"cls must be a subclass of StorageBackend\"",
")",
"__registry__",
"[",
... | Decorator to register another StorageBackend using it's `NAME`. | [
"Decorator",
"to",
"register",
"another",
"StorageBackend",
"using",
"it",
"s",
"NAME",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/backends.py#L13-L18 |
27,858 | src-d/modelforge | modelforge/backends.py | create_backend | def create_backend(name: str=None, git_index: GitIndex=None, args: str=None) -> StorageBackend:
"""Initialize a new StorageBackend by it's name and the specified model registry."""
if name is None:
name = config.BACKEND
if not args:
args = config.BACKEND_ARGS
if args:
try:
kwargs = dict(p.split("=") for p in args.split(","))
except: # flake8: noqa
raise ValueError("Invalid args") from None
else:
kwargs = {}
if git_index is None:
git_index = GitIndex()
kwargs["index"] = git_index
return __registry__[name](**kwargs) | python | def create_backend(name: str=None, git_index: GitIndex=None, args: str=None) -> StorageBackend:
if name is None:
name = config.BACKEND
if not args:
args = config.BACKEND_ARGS
if args:
try:
kwargs = dict(p.split("=") for p in args.split(","))
except: # flake8: noqa
raise ValueError("Invalid args") from None
else:
kwargs = {}
if git_index is None:
git_index = GitIndex()
kwargs["index"] = git_index
return __registry__[name](**kwargs) | [
"def",
"create_backend",
"(",
"name",
":",
"str",
"=",
"None",
",",
"git_index",
":",
"GitIndex",
"=",
"None",
",",
"args",
":",
"str",
"=",
"None",
")",
"->",
"StorageBackend",
":",
"if",
"name",
"is",
"None",
":",
"name",
"=",
"config",
".",
"BACKE... | Initialize a new StorageBackend by it's name and the specified model registry. | [
"Initialize",
"a",
"new",
"StorageBackend",
"by",
"it",
"s",
"name",
"and",
"the",
"specified",
"model",
"registry",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/backends.py#L21-L37 |
27,859 | src-d/modelforge | modelforge/backends.py | create_backend_noexc | def create_backend_noexc(log: logging.Logger, name: str=None, git_index: GitIndex=None,
args: str=None) -> Optional[StorageBackend]:
"""Initialize a new Backend, return None if there was a known problem."""
try:
return create_backend(name, git_index, args)
except KeyError:
log.critical("No such backend: %s (looked in %s)",
name, list(__registry__.keys()))
return None
except ValueError:
log.critical("Invalid backend arguments: %s", args)
return None | python | def create_backend_noexc(log: logging.Logger, name: str=None, git_index: GitIndex=None,
args: str=None) -> Optional[StorageBackend]:
try:
return create_backend(name, git_index, args)
except KeyError:
log.critical("No such backend: %s (looked in %s)",
name, list(__registry__.keys()))
return None
except ValueError:
log.critical("Invalid backend arguments: %s", args)
return None | [
"def",
"create_backend_noexc",
"(",
"log",
":",
"logging",
".",
"Logger",
",",
"name",
":",
"str",
"=",
"None",
",",
"git_index",
":",
"GitIndex",
"=",
"None",
",",
"args",
":",
"str",
"=",
"None",
")",
"->",
"Optional",
"[",
"StorageBackend",
"]",
":"... | Initialize a new Backend, return None if there was a known problem. | [
"Initialize",
"a",
"new",
"Backend",
"return",
"None",
"if",
"there",
"was",
"a",
"known",
"problem",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/backends.py#L40-L51 |
27,860 | src-d/modelforge | modelforge/backends.py | supply_backend | def supply_backend(optional: Union[callable, bool]=False, index_exists: bool=True):
"""
Decorator to pass the initialized backend to the decorated callable. \
Used by command line entries. If the backend cannot be created, return 1.
:param optional: Either a decorated function or a value which indicates whether we should \
construct the backend object if it does not exist in the wrapped function's \
`args`: `True` means we shouldn't.
:param index_exists: Whether the Git model index exists on the remote side or not.
"""
real_optional = False if callable(optional) else optional
def supply_backend_inner(func):
@wraps(func)
def wrapped_supply_backend(args):
log = logging.getLogger(func.__name__)
if real_optional and not getattr(args, "backend", False):
backend = None
else:
try:
git_index = GitIndex(remote=args.index_repo, username=args.username,
password=args.password, cache=args.cache,
exists=index_exists, signoff=args.signoff,
log_level=args.log_level)
except ValueError:
return 1
backend = create_backend_noexc(log, args.backend, git_index, args.args)
if backend is None:
return 1
return func(args, backend, log)
return wrapped_supply_backend
if callable(optional):
return supply_backend_inner(optional)
return supply_backend_inner | python | def supply_backend(optional: Union[callable, bool]=False, index_exists: bool=True):
real_optional = False if callable(optional) else optional
def supply_backend_inner(func):
@wraps(func)
def wrapped_supply_backend(args):
log = logging.getLogger(func.__name__)
if real_optional and not getattr(args, "backend", False):
backend = None
else:
try:
git_index = GitIndex(remote=args.index_repo, username=args.username,
password=args.password, cache=args.cache,
exists=index_exists, signoff=args.signoff,
log_level=args.log_level)
except ValueError:
return 1
backend = create_backend_noexc(log, args.backend, git_index, args.args)
if backend is None:
return 1
return func(args, backend, log)
return wrapped_supply_backend
if callable(optional):
return supply_backend_inner(optional)
return supply_backend_inner | [
"def",
"supply_backend",
"(",
"optional",
":",
"Union",
"[",
"callable",
",",
"bool",
"]",
"=",
"False",
",",
"index_exists",
":",
"bool",
"=",
"True",
")",
":",
"real_optional",
"=",
"False",
"if",
"callable",
"(",
"optional",
")",
"else",
"optional",
"... | Decorator to pass the initialized backend to the decorated callable. \
Used by command line entries. If the backend cannot be created, return 1.
:param optional: Either a decorated function or a value which indicates whether we should \
construct the backend object if it does not exist in the wrapped function's \
`args`: `True` means we shouldn't.
:param index_exists: Whether the Git model index exists on the remote side or not. | [
"Decorator",
"to",
"pass",
"the",
"initialized",
"backend",
"to",
"the",
"decorated",
"callable",
".",
"\\",
"Used",
"by",
"command",
"line",
"entries",
".",
"If",
"the",
"backend",
"cannot",
"be",
"created",
"return",
"1",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/backends.py#L54-L87 |
27,861 | src-d/modelforge | modelforge/meta.py | generate_new_meta | def generate_new_meta(name: str, description: str, vendor: str, license: str) -> dict:
"""
Create the metadata tree for the given model name and the list of dependencies.
:param name: Name of the model.
:param description: Description of the model.
:param vendor: Name of the party which is responsible for support of the model.
:param license: License identifier.
:return: dict with the metadata.
"""
check_license(license)
return {
"code": None,
"created_at": get_datetime_now(),
"datasets": [],
"dependencies": [],
"description": description,
"vendor": vendor,
"environment": collect_environment_without_packages(),
"extra": None,
"license": license,
"metrics": {},
"model": name,
"parent": None,
"references": [],
"series": None,
"tags": [],
"uuid": str(uuid.uuid4()),
"version": [1, 0, 0],
} | python | def generate_new_meta(name: str, description: str, vendor: str, license: str) -> dict:
check_license(license)
return {
"code": None,
"created_at": get_datetime_now(),
"datasets": [],
"dependencies": [],
"description": description,
"vendor": vendor,
"environment": collect_environment_without_packages(),
"extra": None,
"license": license,
"metrics": {},
"model": name,
"parent": None,
"references": [],
"series": None,
"tags": [],
"uuid": str(uuid.uuid4()),
"version": [1, 0, 0],
} | [
"def",
"generate_new_meta",
"(",
"name",
":",
"str",
",",
"description",
":",
"str",
",",
"vendor",
":",
"str",
",",
"license",
":",
"str",
")",
"->",
"dict",
":",
"check_license",
"(",
"license",
")",
"return",
"{",
"\"code\"",
":",
"None",
",",
"\"cr... | Create the metadata tree for the given model name and the list of dependencies.
:param name: Name of the model.
:param description: Description of the model.
:param vendor: Name of the party which is responsible for support of the model.
:param license: License identifier.
:return: dict with the metadata. | [
"Create",
"the",
"metadata",
"tree",
"for",
"the",
"given",
"model",
"name",
"and",
"the",
"list",
"of",
"dependencies",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/meta.py#L24-L53 |
27,862 | src-d/modelforge | modelforge/meta.py | extract_model_meta | def extract_model_meta(base_meta: dict, extra_meta: dict, model_url: str) -> dict:
"""
Merge the metadata from the backend and the extra metadata into a dict which is suitable for \
`index.json`.
:param base_meta: tree["meta"] :class:`dict` containing data from the backend.
:param extra_meta: dict containing data from the user, similar to `template_meta.json`.
:param model_url: public URL of the model.
:return: converted dict.
"""
meta = {"default": {"default": base_meta["uuid"],
"description": base_meta["description"],
"code": extra_meta["code"]}}
del base_meta["model"]
del base_meta["uuid"]
meta["model"] = base_meta
meta["model"].update({k: extra_meta[k] for k in ("code", "datasets", "references", "tags",
"extra")})
response = requests.get(model_url, stream=True)
meta["model"]["size"] = humanize.naturalsize(int(response.headers["content-length"]))
meta["model"]["url"] = model_url
meta["model"]["created_at"] = format_datetime(meta["model"]["created_at"])
return meta | python | def extract_model_meta(base_meta: dict, extra_meta: dict, model_url: str) -> dict:
meta = {"default": {"default": base_meta["uuid"],
"description": base_meta["description"],
"code": extra_meta["code"]}}
del base_meta["model"]
del base_meta["uuid"]
meta["model"] = base_meta
meta["model"].update({k: extra_meta[k] for k in ("code", "datasets", "references", "tags",
"extra")})
response = requests.get(model_url, stream=True)
meta["model"]["size"] = humanize.naturalsize(int(response.headers["content-length"]))
meta["model"]["url"] = model_url
meta["model"]["created_at"] = format_datetime(meta["model"]["created_at"])
return meta | [
"def",
"extract_model_meta",
"(",
"base_meta",
":",
"dict",
",",
"extra_meta",
":",
"dict",
",",
"model_url",
":",
"str",
")",
"->",
"dict",
":",
"meta",
"=",
"{",
"\"default\"",
":",
"{",
"\"default\"",
":",
"base_meta",
"[",
"\"uuid\"",
"]",
",",
"\"de... | Merge the metadata from the backend and the extra metadata into a dict which is suitable for \
`index.json`.
:param base_meta: tree["meta"] :class:`dict` containing data from the backend.
:param extra_meta: dict containing data from the user, similar to `template_meta.json`.
:param model_url: public URL of the model.
:return: converted dict. | [
"Merge",
"the",
"metadata",
"from",
"the",
"backend",
"and",
"the",
"extra",
"metadata",
"into",
"a",
"dict",
"which",
"is",
"suitable",
"for",
"\\",
"index",
".",
"json",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/meta.py#L73-L95 |
27,863 | src-d/modelforge | modelforge/model.py | squeeze_bits | def squeeze_bits(arr: numpy.ndarray) -> numpy.ndarray:
"""Return a copy of an integer numpy array with the minimum bitness."""
assert arr.dtype.kind in ("i", "u")
if arr.dtype.kind == "i":
assert arr.min() >= 0
mlbl = int(arr.max()).bit_length()
if mlbl <= 8:
dtype = numpy.uint8
elif mlbl <= 16:
dtype = numpy.uint16
elif mlbl <= 32:
dtype = numpy.uint32
else:
dtype = numpy.uint64
return arr.astype(dtype) | python | def squeeze_bits(arr: numpy.ndarray) -> numpy.ndarray:
assert arr.dtype.kind in ("i", "u")
if arr.dtype.kind == "i":
assert arr.min() >= 0
mlbl = int(arr.max()).bit_length()
if mlbl <= 8:
dtype = numpy.uint8
elif mlbl <= 16:
dtype = numpy.uint16
elif mlbl <= 32:
dtype = numpy.uint32
else:
dtype = numpy.uint64
return arr.astype(dtype) | [
"def",
"squeeze_bits",
"(",
"arr",
":",
"numpy",
".",
"ndarray",
")",
"->",
"numpy",
".",
"ndarray",
":",
"assert",
"arr",
".",
"dtype",
".",
"kind",
"in",
"(",
"\"i\"",
",",
"\"u\"",
")",
"if",
"arr",
".",
"dtype",
".",
"kind",
"==",
"\"i\"",
":",... | Return a copy of an integer numpy array with the minimum bitness. | [
"Return",
"a",
"copy",
"of",
"an",
"integer",
"numpy",
"array",
"with",
"the",
"minimum",
"bitness",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/model.py#L572-L586 |
27,864 | src-d/modelforge | modelforge/model.py | Model.metaprop | def metaprop(name: str, doc: str, readonly=False):
"""Temporary property builder."""
def get(self):
return self.meta[name]
get.__doc__ = "Get %s%s." % (doc, " (readonly)" if readonly else "")
if not readonly:
def set(self, value):
self.meta[name] = value
set.__doc__ = "Set %s." % doc
return property(get, set)
return property(get) | python | def metaprop(name: str, doc: str, readonly=False):
def get(self):
return self.meta[name]
get.__doc__ = "Get %s%s." % (doc, " (readonly)" if readonly else "")
if not readonly:
def set(self, value):
self.meta[name] = value
set.__doc__ = "Set %s." % doc
return property(get, set)
return property(get) | [
"def",
"metaprop",
"(",
"name",
":",
"str",
",",
"doc",
":",
"str",
",",
"readonly",
"=",
"False",
")",
":",
"def",
"get",
"(",
"self",
")",
":",
"return",
"self",
".",
"meta",
"[",
"name",
"]",
"get",
".",
"__doc__",
"=",
"\"Get %s%s.\"",
"%",
"... | Temporary property builder. | [
"Temporary",
"property",
"builder",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/model.py#L190-L202 |
27,865 | src-d/modelforge | modelforge/model.py | Model.derive | def derive(self, new_version: Union[tuple, list]=None) -> "Model":
"""
Inherit the new model from the current one - used for versioning. \
This operation is in-place.
:param new_version: The version of the new model.
:return: The derived model - self.
"""
meta = self.meta
first_time = self._initial_version == self.version
if new_version is None:
new_version = meta["version"]
new_version[-1] += 1
if not isinstance(new_version, (tuple, list)):
raise ValueError("new_version must be either a list or a tuple, got %s"
% type(new_version))
meta["version"] = list(new_version)
if first_time:
meta["parent"] = meta["uuid"]
meta["uuid"] = str(uuid.uuid4())
return self | python | def derive(self, new_version: Union[tuple, list]=None) -> "Model":
meta = self.meta
first_time = self._initial_version == self.version
if new_version is None:
new_version = meta["version"]
new_version[-1] += 1
if not isinstance(new_version, (tuple, list)):
raise ValueError("new_version must be either a list or a tuple, got %s"
% type(new_version))
meta["version"] = list(new_version)
if first_time:
meta["parent"] = meta["uuid"]
meta["uuid"] = str(uuid.uuid4())
return self | [
"def",
"derive",
"(",
"self",
",",
"new_version",
":",
"Union",
"[",
"tuple",
",",
"list",
"]",
"=",
"None",
")",
"->",
"\"Model\"",
":",
"meta",
"=",
"self",
".",
"meta",
"first_time",
"=",
"self",
".",
"_initial_version",
"==",
"self",
".",
"version"... | Inherit the new model from the current one - used for versioning. \
This operation is in-place.
:param new_version: The version of the new model.
:return: The derived model - self. | [
"Inherit",
"the",
"new",
"model",
"from",
"the",
"current",
"one",
"-",
"used",
"for",
"versioning",
".",
"\\",
"This",
"operation",
"is",
"in",
"-",
"place",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/model.py#L237-L257 |
27,866 | src-d/modelforge | modelforge/model.py | Model.cache_dir | def cache_dir() -> str:
"""Return the default cache directory where downloaded models are stored."""
if config.VENDOR is None:
raise RuntimeError("modelforge is not configured; look at modelforge.configuration. "
"Depending on your objective you may or may not want to create a "
"modelforgecfg.py file which sets VENDOR and the rest.")
return os.path.join("~", "." + config.VENDOR) | python | def cache_dir() -> str:
if config.VENDOR is None:
raise RuntimeError("modelforge is not configured; look at modelforge.configuration. "
"Depending on your objective you may or may not want to create a "
"modelforgecfg.py file which sets VENDOR and the rest.")
return os.path.join("~", "." + config.VENDOR) | [
"def",
"cache_dir",
"(",
")",
"->",
"str",
":",
"if",
"config",
".",
"VENDOR",
"is",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"modelforge is not configured; look at modelforge.configuration. \"",
"\"Depending on your objective you may or may not want to create a \"",
"\"mod... | Return the default cache directory where downloaded models are stored. | [
"Return",
"the",
"default",
"cache",
"directory",
"where",
"downloaded",
"models",
"are",
"stored",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/model.py#L335-L341 |
27,867 | src-d/modelforge | modelforge/model.py | Model.get_dep | def get_dep(self, name: str) -> str:
"""
Return the uuid of the dependency identified with "name".
:param name:
:return: UUID
"""
deps = self.meta["dependencies"]
for d in deps:
if d["model"] == name:
return d
raise KeyError("%s not found in %s." % (name, deps)) | python | def get_dep(self, name: str) -> str:
deps = self.meta["dependencies"]
for d in deps:
if d["model"] == name:
return d
raise KeyError("%s not found in %s." % (name, deps)) | [
"def",
"get_dep",
"(",
"self",
",",
"name",
":",
"str",
")",
"->",
"str",
":",
"deps",
"=",
"self",
".",
"meta",
"[",
"\"dependencies\"",
"]",
"for",
"d",
"in",
"deps",
":",
"if",
"d",
"[",
"\"model\"",
"]",
"==",
"name",
":",
"return",
"d",
"rai... | Return the uuid of the dependency identified with "name".
:param name:
:return: UUID | [
"Return",
"the",
"uuid",
"of",
"the",
"dependency",
"identified",
"with",
"name",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/model.py#L343-L354 |
27,868 | src-d/modelforge | modelforge/model.py | Model.set_dep | def set_dep(self, *deps) -> "Model":
"""
Register the dependencies for this model.
:param deps: The parent models: objects or meta dicts.
:return: self
"""
self.meta["dependencies"] = [
(d.meta if not isinstance(d, dict) else d) for d in deps]
return self | python | def set_dep(self, *deps) -> "Model":
self.meta["dependencies"] = [
(d.meta if not isinstance(d, dict) else d) for d in deps]
return self | [
"def",
"set_dep",
"(",
"self",
",",
"*",
"deps",
")",
"->",
"\"Model\"",
":",
"self",
".",
"meta",
"[",
"\"dependencies\"",
"]",
"=",
"[",
"(",
"d",
".",
"meta",
"if",
"not",
"isinstance",
"(",
"d",
",",
"dict",
")",
"else",
"d",
")",
"for",
"d",... | Register the dependencies for this model.
:param deps: The parent models: objects or meta dicts.
:return: self | [
"Register",
"the",
"dependencies",
"for",
"this",
"model",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/model.py#L356-L365 |
27,869 | src-d/modelforge | modelforge/model.py | Model.save | def save(self, output: Union[str, BinaryIO], series: Optional[str] = None,
deps: Iterable=tuple(), create_missing_dirs: bool=True) -> "Model":
"""
Serialize the model to a file.
:param output: Path to the file or a file object.
:param series: Name of the model series. If it is None, it will be taken from \
the current value; if the current value is empty, an error is raised.
:param deps: List of the dependencies.
:param create_missing_dirs: create missing directories in output path if the output is a \
path.
:return: self
"""
check_license(self.license)
if series is None:
if self.series is None:
raise ValueError("series must be specified")
else:
self.series = series
if isinstance(output, str) and create_missing_dirs:
dirs = os.path.split(output)[0]
if dirs:
os.makedirs(dirs, exist_ok=True)
self.set_dep(*deps)
tree = self._generate_tree()
self._write_tree(tree, output)
self._initial_version = self.version
return self | python | def save(self, output: Union[str, BinaryIO], series: Optional[str] = None,
deps: Iterable=tuple(), create_missing_dirs: bool=True) -> "Model":
check_license(self.license)
if series is None:
if self.series is None:
raise ValueError("series must be specified")
else:
self.series = series
if isinstance(output, str) and create_missing_dirs:
dirs = os.path.split(output)[0]
if dirs:
os.makedirs(dirs, exist_ok=True)
self.set_dep(*deps)
tree = self._generate_tree()
self._write_tree(tree, output)
self._initial_version = self.version
return self | [
"def",
"save",
"(",
"self",
",",
"output",
":",
"Union",
"[",
"str",
",",
"BinaryIO",
"]",
",",
"series",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"deps",
":",
"Iterable",
"=",
"tuple",
"(",
")",
",",
"create_missing_dirs",
":",
"bool",
... | Serialize the model to a file.
:param output: Path to the file or a file object.
:param series: Name of the model series. If it is None, it will be taken from \
the current value; if the current value is empty, an error is raised.
:param deps: List of the dependencies.
:param create_missing_dirs: create missing directories in output path if the output is a \
path.
:return: self | [
"Serialize",
"the",
"model",
"to",
"a",
"file",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/model.py#L374-L401 |
27,870 | src-d/modelforge | modelforge/model.py | Model._write_tree | def _write_tree(self, tree: dict, output: Union[str, BinaryIO], file_mode: int=0o666) -> None:
"""
Write the model to disk.
:param tree: The data dict - will be the ASDF tree.
:param output: The output file path or a file object.
:param file_mode: The output file's permissions.
:return: None
"""
self.meta["created_at"] = get_datetime_now()
meta = self.meta.copy()
meta["environment"] = collect_environment()
final_tree = {}
final_tree.update(tree)
final_tree["meta"] = meta
isfileobj = not isinstance(output, str)
if not isfileobj:
self._source = output
path = output
output = open(output, "wb")
os.chmod(path, file_mode)
pos = 0
else:
pos = output.tell()
try:
with asdf.AsdfFile(final_tree) as file:
queue = [("", tree)]
while queue:
path, element = queue.pop()
if isinstance(element, dict):
for key, val in element.items():
queue.append((path + "/" + key, val))
elif isinstance(element, (list, tuple)):
for child in element:
queue.append((path, child))
elif isinstance(element, numpy.ndarray):
path += "/"
if path not in self._compression_prefixes:
self._log.debug("%s -> %s compression", path, self.ARRAY_COMPRESSION)
file.set_array_compression(element, self.ARRAY_COMPRESSION)
else:
self._log.debug("%s -> compression disabled", path)
file.write_to(output)
self._size = output.seek(0, os.SEEK_END) - pos
finally:
if not isfileobj:
output.close() | python | def _write_tree(self, tree: dict, output: Union[str, BinaryIO], file_mode: int=0o666) -> None:
self.meta["created_at"] = get_datetime_now()
meta = self.meta.copy()
meta["environment"] = collect_environment()
final_tree = {}
final_tree.update(tree)
final_tree["meta"] = meta
isfileobj = not isinstance(output, str)
if not isfileobj:
self._source = output
path = output
output = open(output, "wb")
os.chmod(path, file_mode)
pos = 0
else:
pos = output.tell()
try:
with asdf.AsdfFile(final_tree) as file:
queue = [("", tree)]
while queue:
path, element = queue.pop()
if isinstance(element, dict):
for key, val in element.items():
queue.append((path + "/" + key, val))
elif isinstance(element, (list, tuple)):
for child in element:
queue.append((path, child))
elif isinstance(element, numpy.ndarray):
path += "/"
if path not in self._compression_prefixes:
self._log.debug("%s -> %s compression", path, self.ARRAY_COMPRESSION)
file.set_array_compression(element, self.ARRAY_COMPRESSION)
else:
self._log.debug("%s -> compression disabled", path)
file.write_to(output)
self._size = output.seek(0, os.SEEK_END) - pos
finally:
if not isfileobj:
output.close() | [
"def",
"_write_tree",
"(",
"self",
",",
"tree",
":",
"dict",
",",
"output",
":",
"Union",
"[",
"str",
",",
"BinaryIO",
"]",
",",
"file_mode",
":",
"int",
"=",
"0o666",
")",
"->",
"None",
":",
"self",
".",
"meta",
"[",
"\"created_at\"",
"]",
"=",
"g... | Write the model to disk.
:param tree: The data dict - will be the ASDF tree.
:param output: The output file path or a file object.
:param file_mode: The output file's permissions.
:return: None | [
"Write",
"the",
"model",
"to",
"disk",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/model.py#L403-L449 |
27,871 | src-d/modelforge | modelforge/configuration.py | refresh | def refresh():
"""Scan over all the involved directories and load configs from them."""
override_files = []
for stack in traceback.extract_stack():
f = os.path.join(os.path.dirname(stack[0]), OVERRIDE_FILE)
if f not in override_files:
override_files.insert(0, f)
if OVERRIDE_FILE in override_files:
del override_files[override_files.index(OVERRIDE_FILE)]
override_files.append(OVERRIDE_FILE)
def import_path(path):
if sys.version_info < (3, 5, 0):
from importlib.machinery import SourceFileLoader
return SourceFileLoader(__name__, path).load_module()
import importlib.util
spec = importlib.util.spec_from_file_location(__name__, path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
for override_file in override_files:
if not os.path.isfile(override_file):
continue
mod = import_path(override_file)
globals().update({n: getattr(mod, n) for n in dir(mod) if not n.startswith("__")}) | python | def refresh():
override_files = []
for stack in traceback.extract_stack():
f = os.path.join(os.path.dirname(stack[0]), OVERRIDE_FILE)
if f not in override_files:
override_files.insert(0, f)
if OVERRIDE_FILE in override_files:
del override_files[override_files.index(OVERRIDE_FILE)]
override_files.append(OVERRIDE_FILE)
def import_path(path):
if sys.version_info < (3, 5, 0):
from importlib.machinery import SourceFileLoader
return SourceFileLoader(__name__, path).load_module()
import importlib.util
spec = importlib.util.spec_from_file_location(__name__, path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
for override_file in override_files:
if not os.path.isfile(override_file):
continue
mod = import_path(override_file)
globals().update({n: getattr(mod, n) for n in dir(mod) if not n.startswith("__")}) | [
"def",
"refresh",
"(",
")",
":",
"override_files",
"=",
"[",
"]",
"for",
"stack",
"in",
"traceback",
".",
"extract_stack",
"(",
")",
":",
"f",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"stack",
"[",
"0",
"]... | Scan over all the involved directories and load configs from them. | [
"Scan",
"over",
"all",
"the",
"involved",
"directories",
"and",
"load",
"configs",
"from",
"them",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/configuration.py#L17-L42 |
27,872 | src-d/modelforge | modelforge/gcs_backend.py | GCSBackend.create_client | def create_client(self) -> "google.cloud.storage.Client":
"""
Construct GCS API client.
"""
# Client should be imported here because grpc starts threads during import
# and if you call fork after that, a child process will be hang during exit
from google.cloud.storage import Client
if self.credentials:
client = Client.from_service_account_json(self.credentials)
else:
client = Client()
return client | python | def create_client(self) -> "google.cloud.storage.Client":
# Client should be imported here because grpc starts threads during import
# and if you call fork after that, a child process will be hang during exit
from google.cloud.storage import Client
if self.credentials:
client = Client.from_service_account_json(self.credentials)
else:
client = Client()
return client | [
"def",
"create_client",
"(",
"self",
")",
"->",
"\"google.cloud.storage.Client\"",
":",
"# Client should be imported here because grpc starts threads during import",
"# and if you call fork after that, a child process will be hang during exit",
"from",
"google",
".",
"cloud",
".",
"sto... | Construct GCS API client. | [
"Construct",
"GCS",
"API",
"client",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/gcs_backend.py#L82-L93 |
27,873 | src-d/modelforge | modelforge/gcs_backend.py | GCSBackend.connect | def connect(self) -> "google.cloud.storage.Bucket":
"""
Connect to the assigned bucket.
"""
log = self._log
log.info("Connecting to the bucket...")
client = self.create_client()
return client.lookup_bucket(self.bucket_name) | python | def connect(self) -> "google.cloud.storage.Bucket":
log = self._log
log.info("Connecting to the bucket...")
client = self.create_client()
return client.lookup_bucket(self.bucket_name) | [
"def",
"connect",
"(",
"self",
")",
"->",
"\"google.cloud.storage.Bucket\"",
":",
"log",
"=",
"self",
".",
"_log",
"log",
".",
"info",
"(",
"\"Connecting to the bucket...\"",
")",
"client",
"=",
"self",
".",
"create_client",
"(",
")",
"return",
"client",
".",
... | Connect to the assigned bucket. | [
"Connect",
"to",
"the",
"assigned",
"bucket",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/gcs_backend.py#L95-L102 |
27,874 | src-d/modelforge | modelforge/gcs_backend.py | GCSBackend.reset | def reset(self, force):
"""Connect to the assigned bucket or create if needed. Clear all the blobs inside."""
client = self.create_client()
bucket = client.lookup_bucket(self.bucket_name)
if bucket is not None:
if not force:
self._log.error("Bucket already exists, aborting.")
raise ExistingBackendError
self._log.info("Bucket already exists, deleting all content.")
for blob in bucket.list_blobs():
self._log.info("Deleting %s ..." % blob.name)
bucket.delete_blob(blob.name)
else:
client.create_bucket(self.bucket_name) | python | def reset(self, force):
client = self.create_client()
bucket = client.lookup_bucket(self.bucket_name)
if bucket is not None:
if not force:
self._log.error("Bucket already exists, aborting.")
raise ExistingBackendError
self._log.info("Bucket already exists, deleting all content.")
for blob in bucket.list_blobs():
self._log.info("Deleting %s ..." % blob.name)
bucket.delete_blob(blob.name)
else:
client.create_bucket(self.bucket_name) | [
"def",
"reset",
"(",
"self",
",",
"force",
")",
":",
"client",
"=",
"self",
".",
"create_client",
"(",
")",
"bucket",
"=",
"client",
".",
"lookup_bucket",
"(",
"self",
".",
"bucket_name",
")",
"if",
"bucket",
"is",
"not",
"None",
":",
"if",
"not",
"f... | Connect to the assigned bucket or create if needed. Clear all the blobs inside. | [
"Connect",
"to",
"the",
"assigned",
"bucket",
"or",
"create",
"if",
"needed",
".",
"Clear",
"all",
"the",
"blobs",
"inside",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/gcs_backend.py#L104-L117 |
27,875 | src-d/modelforge | modelforge/gcs_backend.py | GCSBackend.upload_model | def upload_model(self, path: str, meta: dict, force: bool):
"""Put the model to GCS."""
bucket = self.connect()
if bucket is None:
raise BackendRequiredError
blob = bucket.blob("models/%s/%s.asdf" % (meta["model"], meta["uuid"]))
if blob.exists() and not force:
self._log.error("Model %s already exists, aborted.", meta["uuid"])
raise ModelAlreadyExistsError
self._log.info("Uploading %s from %s...", meta["model"], os.path.abspath(path))
def tracker(data):
return self._Tracker(data, self._log)
make_transport = blob._make_transport
def make_transport_with_progress(client):
transport = make_transport(client)
request = transport.request
def request_with_progress(method, url, data=None, headers=None, **kwargs):
return request(method, url, data=tracker(data), headers=headers, **kwargs)
transport.request = request_with_progress
return transport
blob._make_transport = make_transport_with_progress
with open(path, "rb") as fin:
blob.upload_from_file(fin, content_type="application/x-yaml")
blob.make_public()
return blob.public_url | python | def upload_model(self, path: str, meta: dict, force: bool):
bucket = self.connect()
if bucket is None:
raise BackendRequiredError
blob = bucket.blob("models/%s/%s.asdf" % (meta["model"], meta["uuid"]))
if blob.exists() and not force:
self._log.error("Model %s already exists, aborted.", meta["uuid"])
raise ModelAlreadyExistsError
self._log.info("Uploading %s from %s...", meta["model"], os.path.abspath(path))
def tracker(data):
return self._Tracker(data, self._log)
make_transport = blob._make_transport
def make_transport_with_progress(client):
transport = make_transport(client)
request = transport.request
def request_with_progress(method, url, data=None, headers=None, **kwargs):
return request(method, url, data=tracker(data), headers=headers, **kwargs)
transport.request = request_with_progress
return transport
blob._make_transport = make_transport_with_progress
with open(path, "rb") as fin:
blob.upload_from_file(fin, content_type="application/x-yaml")
blob.make_public()
return blob.public_url | [
"def",
"upload_model",
"(",
"self",
",",
"path",
":",
"str",
",",
"meta",
":",
"dict",
",",
"force",
":",
"bool",
")",
":",
"bucket",
"=",
"self",
".",
"connect",
"(",
")",
"if",
"bucket",
"is",
"None",
":",
"raise",
"BackendRequiredError",
"blob",
"... | Put the model to GCS. | [
"Put",
"the",
"model",
"to",
"GCS",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/gcs_backend.py#L119-L150 |
27,876 | src-d/modelforge | modelforge/gcs_backend.py | GCSBackend.fetch_model | def fetch_model(self, source: str, file: Union[str, BinaryIO],
chunk_size: int=DEFAULT_DOWNLOAD_CHUNK_SIZE) -> None:
"""Download the model from GCS."""
download_http(source, file, self._log, chunk_size) | python | def fetch_model(self, source: str, file: Union[str, BinaryIO],
chunk_size: int=DEFAULT_DOWNLOAD_CHUNK_SIZE) -> None:
download_http(source, file, self._log, chunk_size) | [
"def",
"fetch_model",
"(",
"self",
",",
"source",
":",
"str",
",",
"file",
":",
"Union",
"[",
"str",
",",
"BinaryIO",
"]",
",",
"chunk_size",
":",
"int",
"=",
"DEFAULT_DOWNLOAD_CHUNK_SIZE",
")",
"->",
"None",
":",
"download_http",
"(",
"source",
",",
"fi... | Download the model from GCS. | [
"Download",
"the",
"model",
"from",
"GCS",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/gcs_backend.py#L152-L155 |
27,877 | src-d/modelforge | modelforge/gcs_backend.py | GCSBackend.delete_model | def delete_model(self, meta: dict):
"""Delete the model from GCS."""
bucket = self.connect()
if bucket is None:
raise BackendRequiredError
blob_name = "models/%s/%s.asdf" % (meta["model"], meta["uuid"])
self._log.info(blob_name)
try:
self._log.info("Deleting model ...")
bucket.delete_blob(blob_name)
except NotFound:
self._log.warning("Model %s already deleted.", meta["uuid"]) | python | def delete_model(self, meta: dict):
bucket = self.connect()
if bucket is None:
raise BackendRequiredError
blob_name = "models/%s/%s.asdf" % (meta["model"], meta["uuid"])
self._log.info(blob_name)
try:
self._log.info("Deleting model ...")
bucket.delete_blob(blob_name)
except NotFound:
self._log.warning("Model %s already deleted.", meta["uuid"]) | [
"def",
"delete_model",
"(",
"self",
",",
"meta",
":",
"dict",
")",
":",
"bucket",
"=",
"self",
".",
"connect",
"(",
")",
"if",
"bucket",
"is",
"None",
":",
"raise",
"BackendRequiredError",
"blob_name",
"=",
"\"models/%s/%s.asdf\"",
"%",
"(",
"meta",
"[",
... | Delete the model from GCS. | [
"Delete",
"the",
"model",
"from",
"GCS",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/gcs_backend.py#L157-L168 |
27,878 | src-d/modelforge | modelforge/storage_backend.py | download_http | def download_http(source: str, file: Union[str, BinaryIO], log: logging.Logger,
chunk_size: int=DEFAULT_DOWNLOAD_CHUNK_SIZE) -> None:
"""
Download a file from an HTTP source.
:param source: URL to fetch.
:param file: Where to store the downloaded data.
:param log: Logger.
:param chunk_size: Size of download buffer.
"""
log.info("Fetching %s...", source)
r = requests.get(source, stream=True)
if r.status_code != 200:
log.error(
"An error occurred while fetching the model, with code %s" % r.status_code)
raise ValueError
if isinstance(file, str):
os.makedirs(os.path.dirname(file), exist_ok=True)
f = open(file, "wb")
else:
f = file
try:
total_length = int(r.headers.get("content-length"))
num_chunks = math.ceil(total_length / chunk_size)
if num_chunks == 1:
f.write(r.content)
else:
for chunk in progress_bar(
r.iter_content(chunk_size=chunk_size),
log,
expected_size=num_chunks):
if chunk:
f.write(chunk)
finally:
if isinstance(file, str):
f.close() | python | def download_http(source: str, file: Union[str, BinaryIO], log: logging.Logger,
chunk_size: int=DEFAULT_DOWNLOAD_CHUNK_SIZE) -> None:
log.info("Fetching %s...", source)
r = requests.get(source, stream=True)
if r.status_code != 200:
log.error(
"An error occurred while fetching the model, with code %s" % r.status_code)
raise ValueError
if isinstance(file, str):
os.makedirs(os.path.dirname(file), exist_ok=True)
f = open(file, "wb")
else:
f = file
try:
total_length = int(r.headers.get("content-length"))
num_chunks = math.ceil(total_length / chunk_size)
if num_chunks == 1:
f.write(r.content)
else:
for chunk in progress_bar(
r.iter_content(chunk_size=chunk_size),
log,
expected_size=num_chunks):
if chunk:
f.write(chunk)
finally:
if isinstance(file, str):
f.close() | [
"def",
"download_http",
"(",
"source",
":",
"str",
",",
"file",
":",
"Union",
"[",
"str",
",",
"BinaryIO",
"]",
",",
"log",
":",
"logging",
".",
"Logger",
",",
"chunk_size",
":",
"int",
"=",
"DEFAULT_DOWNLOAD_CHUNK_SIZE",
")",
"->",
"None",
":",
"log",
... | Download a file from an HTTP source.
:param source: URL to fetch.
:param file: Where to store the downloaded data.
:param log: Logger.
:param chunk_size: Size of download buffer. | [
"Download",
"a",
"file",
"from",
"an",
"HTTP",
"source",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/storage_backend.py#L111-L146 |
27,879 | src-d/modelforge | modelforge/storage_backend.py | StorageBackend.upload_model | def upload_model(self, path: str, meta: dict, force: bool) -> str:
"""
Put the given file to the remote storage.
:param path: Path to the model file.
:param meta: Metadata of the model.
:param force: Overwrite an existing model.
:return: URL of the uploaded model.
:raises BackendRequiredError: If supplied bucket is unusable.
:raises ModelAlreadyExistsError: If model already exists and no forcing.
"""
raise NotImplementedError | python | def upload_model(self, path: str, meta: dict, force: bool) -> str:
raise NotImplementedError | [
"def",
"upload_model",
"(",
"self",
",",
"path",
":",
"str",
",",
"meta",
":",
"dict",
",",
"force",
":",
"bool",
")",
"->",
"str",
":",
"raise",
"NotImplementedError"
] | Put the given file to the remote storage.
:param path: Path to the model file.
:param meta: Metadata of the model.
:param force: Overwrite an existing model.
:return: URL of the uploaded model.
:raises BackendRequiredError: If supplied bucket is unusable.
:raises ModelAlreadyExistsError: If model already exists and no forcing. | [
"Put",
"the",
"given",
"file",
"to",
"the",
"remote",
"storage",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/storage_backend.py#L48-L59 |
27,880 | src-d/modelforge | modelforge/slogging.py | setup | def setup(level: Union[str, int], structured: bool, config_path: str = None):
"""
Make stdout and stderr unicode friendly in case of misconfigured \
environments, initializes the logging, structured logging and \
enables colored logs if it is appropriate.
:param level: The global logging level.
:param structured: Output JSON logs to stdout.
:param config_path: Path to a yaml file that configures the level of output of the loggers. \
Root logger level is set through the level argument and will override any \
root configuration found in the conf file.
:return: None
"""
global logs_are_structured
logs_are_structured = structured
if not isinstance(level, int):
level = logging._nameToLevel[level]
def ensure_utf8_stream(stream):
if not isinstance(stream, io.StringIO) and hasattr(stream, "buffer"):
stream = codecs.getwriter("utf-8")(stream.buffer)
stream.encoding = "utf-8"
return stream
sys.stdout, sys.stderr = (ensure_utf8_stream(s)
for s in (sys.stdout, sys.stderr))
# basicConfig is only called to make sure there is at least one handler for the root logger.
# All the output level setting is down right afterwards.
logging.basicConfig()
logging.setLogRecordFactory(NumpyLogRecord)
if config_path is not None and os.path.isfile(config_path):
with open(config_path) as fh:
config = yaml.safe_load(fh)
for key, val in config.items():
logging.getLogger(key).setLevel(logging._nameToLevel.get(val, level))
root = logging.getLogger()
root.setLevel(level)
if not structured:
if not sys.stdin.closed and sys.stdout.isatty():
handler = root.handlers[0]
handler.setFormatter(AwesomeFormatter())
else:
root.handlers[0] = StructuredHandler(level) | python | def setup(level: Union[str, int], structured: bool, config_path: str = None):
global logs_are_structured
logs_are_structured = structured
if not isinstance(level, int):
level = logging._nameToLevel[level]
def ensure_utf8_stream(stream):
if not isinstance(stream, io.StringIO) and hasattr(stream, "buffer"):
stream = codecs.getwriter("utf-8")(stream.buffer)
stream.encoding = "utf-8"
return stream
sys.stdout, sys.stderr = (ensure_utf8_stream(s)
for s in (sys.stdout, sys.stderr))
# basicConfig is only called to make sure there is at least one handler for the root logger.
# All the output level setting is down right afterwards.
logging.basicConfig()
logging.setLogRecordFactory(NumpyLogRecord)
if config_path is not None and os.path.isfile(config_path):
with open(config_path) as fh:
config = yaml.safe_load(fh)
for key, val in config.items():
logging.getLogger(key).setLevel(logging._nameToLevel.get(val, level))
root = logging.getLogger()
root.setLevel(level)
if not structured:
if not sys.stdin.closed and sys.stdout.isatty():
handler = root.handlers[0]
handler.setFormatter(AwesomeFormatter())
else:
root.handlers[0] = StructuredHandler(level) | [
"def",
"setup",
"(",
"level",
":",
"Union",
"[",
"str",
",",
"int",
"]",
",",
"structured",
":",
"bool",
",",
"config_path",
":",
"str",
"=",
"None",
")",
":",
"global",
"logs_are_structured",
"logs_are_structured",
"=",
"structured",
"if",
"not",
"isinsta... | Make stdout and stderr unicode friendly in case of misconfigured \
environments, initializes the logging, structured logging and \
enables colored logs if it is appropriate.
:param level: The global logging level.
:param structured: Output JSON logs to stdout.
:param config_path: Path to a yaml file that configures the level of output of the loggers. \
Root logger level is set through the level argument and will override any \
root configuration found in the conf file.
:return: None | [
"Make",
"stdout",
"and",
"stderr",
"unicode",
"friendly",
"in",
"case",
"of",
"misconfigured",
"\\",
"environments",
"initializes",
"the",
"logging",
"structured",
"logging",
"and",
"\\",
"enables",
"colored",
"logs",
"if",
"it",
"is",
"appropriate",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/slogging.py#L164-L209 |
27,881 | src-d/modelforge | modelforge/slogging.py | set_context | def set_context(context):
"""Assign the logging context - an abstract object - to the current thread."""
try:
handler = logging.getLogger().handlers[0]
except IndexError:
# logging is not initialized
return
if not isinstance(handler, StructuredHandler):
return
handler.acquire()
try:
handler.local.context = context
finally:
handler.release() | python | def set_context(context):
try:
handler = logging.getLogger().handlers[0]
except IndexError:
# logging is not initialized
return
if not isinstance(handler, StructuredHandler):
return
handler.acquire()
try:
handler.local.context = context
finally:
handler.release() | [
"def",
"set_context",
"(",
"context",
")",
":",
"try",
":",
"handler",
"=",
"logging",
".",
"getLogger",
"(",
")",
".",
"handlers",
"[",
"0",
"]",
"except",
"IndexError",
":",
"# logging is not initialized",
"return",
"if",
"not",
"isinstance",
"(",
"handler... | Assign the logging context - an abstract object - to the current thread. | [
"Assign",
"the",
"logging",
"context",
"-",
"an",
"abstract",
"object",
"-",
"to",
"the",
"current",
"thread",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/slogging.py#L212-L225 |
27,882 | src-d/modelforge | modelforge/slogging.py | add_logging_args | def add_logging_args(parser: argparse.ArgumentParser, patch: bool = True,
erase_args: bool = True) -> None:
"""
Add command line flags specific to logging.
:param parser: `argparse` parser where to add new flags.
:param erase_args: Automatically remove logging-related flags from parsed args.
:param patch: Patch parse_args() to automatically setup logging.
"""
parser.add_argument("--log-level", default="INFO", choices=logging._nameToLevel,
help="Logging verbosity.")
parser.add_argument("--log-structured", action="store_true",
help="Enable structured logging (JSON record per line).")
parser.add_argument("--log-config",
help="Path to the file which sets individual log levels of domains.")
# monkey-patch parse_args()
# custom actions do not work, unfortunately, because they are not invoked if
# the corresponding --flags are not specified
def _patched_parse_args(args=None, namespace=None) -> argparse.Namespace:
args = parser._original_parse_args(args, namespace)
setup(args.log_level, args.log_structured, args.log_config)
if erase_args:
for log_arg in ("log_level", "log_structured", "log_config"):
delattr(args, log_arg)
return args
if patch and not hasattr(parser, "_original_parse_args"):
parser._original_parse_args = parser.parse_args
parser.parse_args = _patched_parse_args | python | def add_logging_args(parser: argparse.ArgumentParser, patch: bool = True,
erase_args: bool = True) -> None:
parser.add_argument("--log-level", default="INFO", choices=logging._nameToLevel,
help="Logging verbosity.")
parser.add_argument("--log-structured", action="store_true",
help="Enable structured logging (JSON record per line).")
parser.add_argument("--log-config",
help="Path to the file which sets individual log levels of domains.")
# monkey-patch parse_args()
# custom actions do not work, unfortunately, because they are not invoked if
# the corresponding --flags are not specified
def _patched_parse_args(args=None, namespace=None) -> argparse.Namespace:
args = parser._original_parse_args(args, namespace)
setup(args.log_level, args.log_structured, args.log_config)
if erase_args:
for log_arg in ("log_level", "log_structured", "log_config"):
delattr(args, log_arg)
return args
if patch and not hasattr(parser, "_original_parse_args"):
parser._original_parse_args = parser.parse_args
parser.parse_args = _patched_parse_args | [
"def",
"add_logging_args",
"(",
"parser",
":",
"argparse",
".",
"ArgumentParser",
",",
"patch",
":",
"bool",
"=",
"True",
",",
"erase_args",
":",
"bool",
"=",
"True",
")",
"->",
"None",
":",
"parser",
".",
"add_argument",
"(",
"\"--log-level\"",
",",
"defa... | Add command line flags specific to logging.
:param parser: `argparse` parser where to add new flags.
:param erase_args: Automatically remove logging-related flags from parsed args.
:param patch: Patch parse_args() to automatically setup logging. | [
"Add",
"command",
"line",
"flags",
"specific",
"to",
"logging",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/slogging.py#L228-L257 |
27,883 | src-d/modelforge | modelforge/slogging.py | NumpyLogRecord.array2string | def array2string(arr: numpy.ndarray) -> str:
"""Format numpy array as a string."""
shape = str(arr.shape)[1:-1]
if shape.endswith(","):
shape = shape[:-1]
return numpy.array2string(arr, threshold=11) + "%s[%s]" % (arr.dtype, shape) | python | def array2string(arr: numpy.ndarray) -> str:
shape = str(arr.shape)[1:-1]
if shape.endswith(","):
shape = shape[:-1]
return numpy.array2string(arr, threshold=11) + "%s[%s]" % (arr.dtype, shape) | [
"def",
"array2string",
"(",
"arr",
":",
"numpy",
".",
"ndarray",
")",
"->",
"str",
":",
"shape",
"=",
"str",
"(",
"arr",
".",
"shape",
")",
"[",
"1",
":",
"-",
"1",
"]",
"if",
"shape",
".",
"endswith",
"(",
"\",\"",
")",
":",
"shape",
"=",
"sha... | Format numpy array as a string. | [
"Format",
"numpy",
"array",
"as",
"a",
"string",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/slogging.py#L64-L69 |
27,884 | src-d/modelforge | modelforge/slogging.py | NumpyLogRecord.getMessage | def getMessage(self):
"""
Return the message for this LogRecord.
Return the message for this LogRecord after merging any user-supplied \
arguments with the message.
"""
if isinstance(self.msg, numpy.ndarray):
msg = self.array2string(self.msg)
else:
msg = str(self.msg)
if self.args:
a2s = self.array2string
if isinstance(self.args, Dict):
args = {k: (a2s(v) if isinstance(v, numpy.ndarray) else v)
for (k, v) in self.args.items()}
elif isinstance(self.args, Sequence):
args = tuple((a2s(a) if isinstance(a, numpy.ndarray) else a)
for a in self.args)
else:
raise TypeError("Unexpected input '%s' with type '%s'" % (self.args,
type(self.args)))
msg = msg % args
return msg | python | def getMessage(self):
if isinstance(self.msg, numpy.ndarray):
msg = self.array2string(self.msg)
else:
msg = str(self.msg)
if self.args:
a2s = self.array2string
if isinstance(self.args, Dict):
args = {k: (a2s(v) if isinstance(v, numpy.ndarray) else v)
for (k, v) in self.args.items()}
elif isinstance(self.args, Sequence):
args = tuple((a2s(a) if isinstance(a, numpy.ndarray) else a)
for a in self.args)
else:
raise TypeError("Unexpected input '%s' with type '%s'" % (self.args,
type(self.args)))
msg = msg % args
return msg | [
"def",
"getMessage",
"(",
"self",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"msg",
",",
"numpy",
".",
"ndarray",
")",
":",
"msg",
"=",
"self",
".",
"array2string",
"(",
"self",
".",
"msg",
")",
"else",
":",
"msg",
"=",
"str",
"(",
"self",
"... | Return the message for this LogRecord.
Return the message for this LogRecord after merging any user-supplied \
arguments with the message. | [
"Return",
"the",
"message",
"for",
"this",
"LogRecord",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/slogging.py#L71-L94 |
27,885 | src-d/modelforge | modelforge/slogging.py | AwesomeFormatter.formatMessage | def formatMessage(self, record: logging.LogRecord) -> str:
"""Convert the already filled log record to a string."""
level_color = "0"
text_color = "0"
fmt = ""
if record.levelno <= logging.DEBUG:
fmt = "\033[0;37m" + logging.BASIC_FORMAT + "\033[0m"
elif record.levelno <= logging.INFO:
level_color = "1;36"
lmsg = record.message.lower()
if self.GREEN_RE.search(lmsg):
text_color = "1;32"
elif record.levelno <= logging.WARNING:
level_color = "1;33"
elif record.levelno <= logging.CRITICAL:
level_color = "1;31"
if not fmt:
fmt = "\033[" + level_color + \
"m%(levelname)s\033[0m:%(rthread)s:%(name)s:\033[" + text_color + \
"m%(message)s\033[0m"
fmt = _fest + fmt
record.rthread = reduce_thread_id(record.thread)
return fmt % record.__dict__ | python | def formatMessage(self, record: logging.LogRecord) -> str:
level_color = "0"
text_color = "0"
fmt = ""
if record.levelno <= logging.DEBUG:
fmt = "\033[0;37m" + logging.BASIC_FORMAT + "\033[0m"
elif record.levelno <= logging.INFO:
level_color = "1;36"
lmsg = record.message.lower()
if self.GREEN_RE.search(lmsg):
text_color = "1;32"
elif record.levelno <= logging.WARNING:
level_color = "1;33"
elif record.levelno <= logging.CRITICAL:
level_color = "1;31"
if not fmt:
fmt = "\033[" + level_color + \
"m%(levelname)s\033[0m:%(rthread)s:%(name)s:\033[" + text_color + \
"m%(message)s\033[0m"
fmt = _fest + fmt
record.rthread = reduce_thread_id(record.thread)
return fmt % record.__dict__ | [
"def",
"formatMessage",
"(",
"self",
",",
"record",
":",
"logging",
".",
"LogRecord",
")",
"->",
"str",
":",
"level_color",
"=",
"\"0\"",
"text_color",
"=",
"\"0\"",
"fmt",
"=",
"\"\"",
"if",
"record",
".",
"levelno",
"<=",
"logging",
".",
"DEBUG",
":",
... | Convert the already filled log record to a string. | [
"Convert",
"the",
"already",
"filled",
"log",
"record",
"to",
"a",
"string",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/slogging.py#L106-L128 |
27,886 | src-d/modelforge | modelforge/slogging.py | StructuredHandler.emit | def emit(self, record: logging.LogRecord):
"""Print the log record formatted as JSON to stdout."""
created = datetime.datetime.fromtimestamp(record.created, timezone)
obj = {
"level": record.levelname.lower(),
"msg": record.msg % record.args,
"source": "%s:%d" % (record.filename, record.lineno),
"time": format_datetime(created),
"thread": reduce_thread_id(record.thread),
}
if record.exc_info is not None:
obj["error"] = traceback.format_exception(*record.exc_info)[1:]
try:
obj["context"] = self.local.context
except AttributeError:
pass
json.dump(obj, sys.stdout, sort_keys=True)
sys.stdout.write("\n")
sys.stdout.flush() | python | def emit(self, record: logging.LogRecord):
created = datetime.datetime.fromtimestamp(record.created, timezone)
obj = {
"level": record.levelname.lower(),
"msg": record.msg % record.args,
"source": "%s:%d" % (record.filename, record.lineno),
"time": format_datetime(created),
"thread": reduce_thread_id(record.thread),
}
if record.exc_info is not None:
obj["error"] = traceback.format_exception(*record.exc_info)[1:]
try:
obj["context"] = self.local.context
except AttributeError:
pass
json.dump(obj, sys.stdout, sort_keys=True)
sys.stdout.write("\n")
sys.stdout.flush() | [
"def",
"emit",
"(",
"self",
",",
"record",
":",
"logging",
".",
"LogRecord",
")",
":",
"created",
"=",
"datetime",
".",
"datetime",
".",
"fromtimestamp",
"(",
"record",
".",
"created",
",",
"timezone",
")",
"obj",
"=",
"{",
"\"level\"",
":",
"record",
... | Print the log record formatted as JSON to stdout. | [
"Print",
"the",
"log",
"record",
"formatted",
"as",
"JSON",
"to",
"stdout",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/slogging.py#L139-L157 |
27,887 | src-d/modelforge | modelforge/models.py | register_model | def register_model(cls: Type[Model]):
"""
Include the given model class into the registry.
:param cls: The class of the registered model.
:return: None
"""
if not issubclass(cls, Model):
raise TypeError("model bust be a subclass of Model")
if issubclass(cls, GenericModel):
raise TypeError("model must not be a subclass of GenericModel")
__models__.add(cls)
return cls | python | def register_model(cls: Type[Model]):
if not issubclass(cls, Model):
raise TypeError("model bust be a subclass of Model")
if issubclass(cls, GenericModel):
raise TypeError("model must not be a subclass of GenericModel")
__models__.add(cls)
return cls | [
"def",
"register_model",
"(",
"cls",
":",
"Type",
"[",
"Model",
"]",
")",
":",
"if",
"not",
"issubclass",
"(",
"cls",
",",
"Model",
")",
":",
"raise",
"TypeError",
"(",
"\"model bust be a subclass of Model\"",
")",
"if",
"issubclass",
"(",
"cls",
",",
"Gen... | Include the given model class into the registry.
:param cls: The class of the registered model.
:return: None | [
"Include",
"the",
"given",
"model",
"class",
"into",
"the",
"registry",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/models.py#L10-L22 |
27,888 | src-d/modelforge | modelforge/index.py | GitIndex.fetch | def fetch(self):
"""Load from the associated Git repository."""
os.makedirs(os.path.dirname(self.cached_repo), exist_ok=True)
if not os.path.exists(self.cached_repo):
self._log.warning("Index not found, caching %s in %s", self.repo, self.cached_repo)
git.clone(self.remote_url, self.cached_repo, checkout=True)
else:
self._log.debug("Index is cached")
if self._are_local_and_remote_heads_different():
self._log.info("Cached index is not up to date, pulling %s", self. repo)
git.pull(self.cached_repo, self.remote_url)
with open(os.path.join(self.cached_repo, self.INDEX_FILE), encoding="utf-8") as _in:
self.contents = json.load(_in) | python | def fetch(self):
os.makedirs(os.path.dirname(self.cached_repo), exist_ok=True)
if not os.path.exists(self.cached_repo):
self._log.warning("Index not found, caching %s in %s", self.repo, self.cached_repo)
git.clone(self.remote_url, self.cached_repo, checkout=True)
else:
self._log.debug("Index is cached")
if self._are_local_and_remote_heads_different():
self._log.info("Cached index is not up to date, pulling %s", self. repo)
git.pull(self.cached_repo, self.remote_url)
with open(os.path.join(self.cached_repo, self.INDEX_FILE), encoding="utf-8") as _in:
self.contents = json.load(_in) | [
"def",
"fetch",
"(",
"self",
")",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"self",
".",
"cached_repo",
")",
",",
"exist_ok",
"=",
"True",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"cache... | Load from the associated Git repository. | [
"Load",
"from",
"the",
"associated",
"Git",
"repository",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/index.py#L102-L114 |
27,889 | src-d/modelforge | modelforge/index.py | GitIndex.update_readme | def update_readme(self, template_readme: Template):
"""Generate the new README file locally."""
readme = os.path.join(self.cached_repo, "README.md")
if os.path.exists(readme):
os.remove(readme)
links = {model_type: {} for model_type in self.models.keys()}
for model_type, model_uuids in self.models.items():
for model_uuid in model_uuids:
links[model_type][model_uuid] = os.path.join("/", model_type, "%s.md" % model_uuid)
with open(readme, "w") as fout:
fout.write(template_readme.render(models=self.models, meta=self.meta, links=links))
git.add(self.cached_repo, [readme])
self._log.info("Updated %s", readme) | python | def update_readme(self, template_readme: Template):
readme = os.path.join(self.cached_repo, "README.md")
if os.path.exists(readme):
os.remove(readme)
links = {model_type: {} for model_type in self.models.keys()}
for model_type, model_uuids in self.models.items():
for model_uuid in model_uuids:
links[model_type][model_uuid] = os.path.join("/", model_type, "%s.md" % model_uuid)
with open(readme, "w") as fout:
fout.write(template_readme.render(models=self.models, meta=self.meta, links=links))
git.add(self.cached_repo, [readme])
self._log.info("Updated %s", readme) | [
"def",
"update_readme",
"(",
"self",
",",
"template_readme",
":",
"Template",
")",
":",
"readme",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"cached_repo",
",",
"\"README.md\"",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"readme",
")... | Generate the new README file locally. | [
"Generate",
"the",
"new",
"README",
"file",
"locally",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/index.py#L166-L178 |
27,890 | src-d/modelforge | modelforge/index.py | GitIndex.reset | def reset(self):
"""Initialize the remote Git repository."""
paths = []
for filename in os.listdir(self.cached_repo):
if filename.startswith(".git"):
continue
path = os.path.join(self.cached_repo, filename)
if os.path.isfile(path):
paths.append(path)
elif os.path.isdir(path):
for model in os.listdir(path):
paths.append(os.path.join(path, model))
git.remove(self.cached_repo, paths)
self.contents = {"models": {}, "meta": {}} | python | def reset(self):
paths = []
for filename in os.listdir(self.cached_repo):
if filename.startswith(".git"):
continue
path = os.path.join(self.cached_repo, filename)
if os.path.isfile(path):
paths.append(path)
elif os.path.isdir(path):
for model in os.listdir(path):
paths.append(os.path.join(path, model))
git.remove(self.cached_repo, paths)
self.contents = {"models": {}, "meta": {}} | [
"def",
"reset",
"(",
"self",
")",
":",
"paths",
"=",
"[",
"]",
"for",
"filename",
"in",
"os",
".",
"listdir",
"(",
"self",
".",
"cached_repo",
")",
":",
"if",
"filename",
".",
"startswith",
"(",
"\".git\"",
")",
":",
"continue",
"path",
"=",
"os",
... | Initialize the remote Git repository. | [
"Initialize",
"the",
"remote",
"Git",
"repository",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/index.py#L180-L193 |
27,891 | src-d/modelforge | modelforge/index.py | GitIndex.upload | def upload(self, cmd: str, meta: dict):
"""Push the current state of the registry to Git."""
index = os.path.join(self.cached_repo, self.INDEX_FILE)
if os.path.exists(index):
os.remove(index)
self._log.info("Writing the new index.json ...")
with open(index, "w") as _out:
json.dump(self.contents, _out)
git.add(self.cached_repo, [index])
message = self.COMMIT_MESSAGES[cmd].format(**meta)
if self.signoff:
global_conf_path = os.path.expanduser("~/.gitconfig")
if os.path.exists(global_conf_path):
with open(global_conf_path, "br") as _in:
conf = ConfigFile.from_file(_in)
try:
name = conf.get(b"user", b"name").decode()
email = conf.get(b"user", b"email").decode()
message += self.DCO_MESSAGE.format(name=name, email=email)
except KeyError:
self._log.warning(
"Did not find name or email in %s, committing without DCO.",
global_conf_path)
else:
self._log.warning("Global git configuration file %s does not exist, "
"committing without DCO.", global_conf_path)
else:
self._log.info("Committing the index without DCO.")
git.commit(self.cached_repo, message=message)
self._log.info("Pushing the updated index ...")
# TODO: change when https://github.com/dulwich/dulwich/issues/631 gets addressed
git.push(self.cached_repo, self.remote_url, b"master")
if self._are_local_and_remote_heads_different():
self._log.error("Push has failed")
raise ValueError("Push has failed") | python | def upload(self, cmd: str, meta: dict):
index = os.path.join(self.cached_repo, self.INDEX_FILE)
if os.path.exists(index):
os.remove(index)
self._log.info("Writing the new index.json ...")
with open(index, "w") as _out:
json.dump(self.contents, _out)
git.add(self.cached_repo, [index])
message = self.COMMIT_MESSAGES[cmd].format(**meta)
if self.signoff:
global_conf_path = os.path.expanduser("~/.gitconfig")
if os.path.exists(global_conf_path):
with open(global_conf_path, "br") as _in:
conf = ConfigFile.from_file(_in)
try:
name = conf.get(b"user", b"name").decode()
email = conf.get(b"user", b"email").decode()
message += self.DCO_MESSAGE.format(name=name, email=email)
except KeyError:
self._log.warning(
"Did not find name or email in %s, committing without DCO.",
global_conf_path)
else:
self._log.warning("Global git configuration file %s does not exist, "
"committing without DCO.", global_conf_path)
else:
self._log.info("Committing the index without DCO.")
git.commit(self.cached_repo, message=message)
self._log.info("Pushing the updated index ...")
# TODO: change when https://github.com/dulwich/dulwich/issues/631 gets addressed
git.push(self.cached_repo, self.remote_url, b"master")
if self._are_local_and_remote_heads_different():
self._log.error("Push has failed")
raise ValueError("Push has failed") | [
"def",
"upload",
"(",
"self",
",",
"cmd",
":",
"str",
",",
"meta",
":",
"dict",
")",
":",
"index",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"cached_repo",
",",
"self",
".",
"INDEX_FILE",
")",
"if",
"os",
".",
"path",
".",
"exists",
... | Push the current state of the registry to Git. | [
"Push",
"the",
"current",
"state",
"of",
"the",
"registry",
"to",
"Git",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/index.py#L195-L229 |
def load_template(self, template: str) -> Template:
    """Load a Jinja2 template from the source directory.

    :param template: Path to a Markdown Jinja2 template; relative paths are \
        resolved against this module's directory.
    :return: The compiled :class:`jinja2.Template` with ``filename`` set.
    :raises ValueError: If the file name does not end with ``.md.jinja2``.
    """
    jinja2_ext = ".jinja2"
    # Validate the file name before touching the filesystem.
    if not template.endswith(jinja2_ext):
        self._log.error("Template file name must end with %s" % jinja2_ext)
        raise ValueError
    if not template[:-len(jinja2_ext)].endswith(".md"):
        self._log.error("Template file should be a Markdown file.")
        raise ValueError
    path = template if os.path.isabs(template) \
        else os.path.join(os.path.dirname(__file__), template)
    with open(path, encoding="utf-8") as fin:
        source = fin.read()
    template_obj = Template(source, trim_blocks=True, lstrip_blocks=True,
                            keep_trailing_newline=False)
    template_obj.filename = path
    self._log.info("Loaded %s", path)
    return template_obj
env = dict(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=False)
jinja2_ext = ".jinja2"
if not template.endswith(jinja2_ext):
self._log.error("Template file name must end with %s" % jinja2_ext)
raise ValueError
if not template[:-len(jinja2_ext)].endswith(".md"):
self._log.error("Template file should be a Markdown file.")
raise ValueError
if not os.path.isabs(template):
template = os.path.join(os.path.dirname(__file__), template)
with open(template, encoding="utf-8") as fin:
template_obj = Template(fin.read(), **env)
template_obj.filename = template
self._log.info("Loaded %s", template)
return template_obj | [
"def",
"load_template",
"(",
"self",
",",
"template",
":",
"str",
")",
"->",
"Template",
":",
"env",
"=",
"dict",
"(",
"trim_blocks",
"=",
"True",
",",
"lstrip_blocks",
"=",
"True",
",",
"keep_trailing_newline",
"=",
"False",
")",
"jinja2_ext",
"=",
"\".ji... | Load a Jinja2 template from the source directory. | [
"Load",
"a",
"Jinja2",
"template",
"from",
"the",
"source",
"directory",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/index.py#L231-L247 |
def progress_bar(enumerable, logger, **kwargs):
    """
    Show the progress bar in the terminal, if the logging level matches and we are interactive.

    :param enumerable: The iterator of which we indicate the progress.
    :param logger: The bound logging.Logger.
    :param kwargs: Keyword arguments to pass to clint.textui.progress.bar.
    :return: The wrapped iterator.
    """
    # Only draw a bar when INFO logging is on and stdin is a live terminal;
    # the checks short-circuit so a closed stdin is never queried with isatty().
    interactive = (logger.isEnabledFor(logging.INFO)
                   and not sys.stdin.closed
                   and sys.stdin.isatty())
    if not interactive:
        return enumerable
    return progress.bar(enumerable, **kwargs)
if not logger.isEnabledFor(logging.INFO) or sys.stdin.closed or not sys.stdin.isatty():
return enumerable
return progress.bar(enumerable, **kwargs) | [
"def",
"progress_bar",
"(",
"enumerable",
",",
"logger",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"logger",
".",
"isEnabledFor",
"(",
"logging",
".",
"INFO",
")",
"or",
"sys",
".",
"stdin",
".",
"closed",
"or",
"not",
"sys",
".",
"stdin",
".",... | Show the progress bar in the terminal, if the logging level matches and we are interactive.
:param enumerable: The iterator of which we indicate the progress.
:param logger: The bound logging.Logger.
:param kwargs: Keyword arguments to pass to clint.textui.progress.bar.
:return: The wrapped iterator. | [
"Show",
"the",
"progress",
"bar",
"in",
"the",
"terminal",
"if",
"the",
"logging",
"level",
"matches",
"and",
"we",
"are",
"interactive",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/progress_bar.py#L7-L18 |
def collect_environment(no_cache: bool = False) -> dict:
    """
    Return the version of the Python executable, the versions of the currently loaded packages \
    and the running platform.

    The result is cached unless `no_cache` is True.

    :param no_cache: When True, rebuild the environment dict even if one is cached.
    :return: Dict with platform/interpreter info plus a "packages" key.
    """
    global _env
    if _env is None or no_cache:
        # Build the dict locally and publish it only once it is complete: the
        # original assigned `_env` before attaching "packages", so an exception
        # in collect_loaded_packages() left a partial dict cached and silently
        # returned by every later call.
        env = collect_environment_without_packages()
        env["packages"] = collect_loaded_packages()
        _env = env
    return _env
return _env | python | def collect_environment(no_cache: bool = False) -> dict:
global _env
if _env is None or no_cache:
_env = collect_environment_without_packages()
_env["packages"] = collect_loaded_packages()
return _env | [
"def",
"collect_environment",
"(",
"no_cache",
":",
"bool",
"=",
"False",
")",
"->",
"dict",
":",
"global",
"_env",
"if",
"_env",
"is",
"None",
"or",
"no_cache",
":",
"_env",
"=",
"collect_environment_without_packages",
"(",
")",
"_env",
"[",
"\"packages\"",
... | Return the version of the Python executable, the versions of the currently loaded packages \
and the running platform.
The result is cached unless `no_cache` is True. | [
"Return",
"the",
"version",
"of",
"the",
"Python",
"executable",
"the",
"versions",
"of",
"the",
"currently",
"loaded",
"packages",
"\\",
"and",
"the",
"running",
"platform",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/environment.py#L28-L39 |
def collect_loaded_packages() -> List[Tuple[str, str]]:
    """
    Return the currently loaded package names and their versions.
    """
    get_dist_files = DistFilesFinder()
    # Map every file shipped by an installed distribution back to that distribution.
    file_table = {file: dist
                  for dist in get_installed_distributions()
                  for file in get_dist_files(dist)}
    used_dists = set()
    # Snapshot sys.modules eagerly: iterating it live can raise
    # "dictionary changed size during iteration".
    for module in list(sys.modules.values()):
        try:
            # Modules without __file__ (builtins) or files from unknown
            # distributions are skipped.
            used_dists.add(file_table[module.__file__])
        except (AttributeError, KeyError):
            continue
    return sorted((dist.project_name, dist.version) for dist in used_dists)
dists = get_installed_distributions()
get_dist_files = DistFilesFinder()
file_table = {}
for dist in dists:
for file in get_dist_files(dist):
file_table[file] = dist
used_dists = set()
# we greedily load all values to a list to avoid weird
# "dictionary changed size during iteration" errors
for module in list(sys.modules.values()):
try:
dist = file_table[module.__file__]
except (AttributeError, KeyError):
continue
used_dists.add(dist)
return sorted((dist.project_name, dist.version) for dist in used_dists) | [
"def",
"collect_loaded_packages",
"(",
")",
"->",
"List",
"[",
"Tuple",
"[",
"str",
",",
"str",
"]",
"]",
":",
"dists",
"=",
"get_installed_distributions",
"(",
")",
"get_dist_files",
"=",
"DistFilesFinder",
"(",
")",
"file_table",
"=",
"{",
"}",
"for",
"d... | Return the currently loaded package names and their versions. | [
"Return",
"the",
"currently",
"loaded",
"package",
"names",
"and",
"their",
"versions",
"."
] | 4f73c2bf0318261ac01bc8b6c0d4250a5d303418 | https://github.com/src-d/modelforge/blob/4f73c2bf0318261ac01bc8b6c0d4250a5d303418/modelforge/environment.py#L42-L61 |
def setup_blueprint(self):
    """Initialize the blueprint.

    Registers the monitoring endpoints (status, health, readiness, thread
    backtraces) on ``self.blueprint``.
    """
    # Route table: (rule, endpoint name, view function).
    routes = (
        ("/", "status", self.status),
        ("/healthy", "health", self.healthy),
        ("/ready", "ready", self.ready),
        ("/threads", "threads", self.threads_bt),
    )
    for rule, endpoint, view in routes:
        self.blueprint.add_url_rule(rule, endpoint, view)
# Register endpoints.
self.blueprint.add_url_rule("/", "status", self.status)
self.blueprint.add_url_rule("/healthy", "health", self.healthy)
self.blueprint.add_url_rule("/ready", "ready", self.ready)
self.blueprint.add_url_rule("/threads", "threads", self.threads_bt) | [
"def",
"setup_blueprint",
"(",
"self",
")",
":",
"# Register endpoints.",
"self",
".",
"blueprint",
".",
"add_url_rule",
"(",
"\"/\"",
",",
"\"status\"",
",",
"self",
".",
"status",
")",
"self",
".",
"blueprint",
".",
"add_url_rule",
"(",
"\"/healthy\"",
",",
... | Initialize the blueprint. | [
"Initialize",
"the",
"blueprint",
"."
] | 9a274e534a2af5d2b2a5e99f10c59010adb94863 | https://github.com/criteo/gourde/blob/9a274e534a2af5d2b2a5e99f10c59010adb94863/gourde/gourde.py#L69-L76 |
27,897 | criteo/gourde | gourde/gourde.py | Gourde._add_routes | def _add_routes(self):
"""Add some nice default routes."""
if self.app.has_static_folder:
self.add_url_rule("/favicon.ico", "favicon", self.favicon)
self.add_url_rule("/", "__default_redirect_to_status", self.redirect_to_status) | python | def _add_routes(self):
if self.app.has_static_folder:
self.add_url_rule("/favicon.ico", "favicon", self.favicon)
self.add_url_rule("/", "__default_redirect_to_status", self.redirect_to_status) | [
"def",
"_add_routes",
"(",
"self",
")",
":",
"if",
"self",
".",
"app",
".",
"has_static_folder",
":",
"self",
".",
"add_url_rule",
"(",
"\"/favicon.ico\"",
",",
"\"favicon\"",
",",
"self",
".",
"favicon",
")",
"self",
".",
"add_url_rule",
"(",
"\"/\"",
","... | Add some nice default routes. | [
"Add",
"some",
"nice",
"default",
"routes",
"."
] | 9a274e534a2af5d2b2a5e99f10c59010adb94863 | https://github.com/criteo/gourde/blob/9a274e534a2af5d2b2a5e99f10c59010adb94863/gourde/gourde.py#L78-L82 |
def get_argparser(parser=None):
    """Customize a parser to get the correct options.

    :param parser: An existing argparse.ArgumentParser to extend; a new one is
        created when falsy.
    :return: The parser with all gourde options registered.
    """
    parser = parser or argparse.ArgumentParser()
    add = parser.add_argument
    add("--host", default="0.0.0.0", help="Host listen address")
    add("--port", "-p", default=9050, help="Listen port", type=int)
    add("--debug", "-d", default=False, action="store_true",
        help="Enable debug mode")
    add("--log-level", "-l", default="INFO",
        help="Log Level, empty string to disable.")
    add("--twisted", default=False, action="store_true",
        help="Use twisted to server requests.")
    add("--gunicorn", default=False, action="store_true",
        help="Use gunicorn to server requests.")
    add("--threads", default=None, help="Number of threads to use.", type=int)
    add("--disable-embedded-logging", default=False, action="store_true",
        help="Disable embedded logging configuration")
    return parser
parser = parser or argparse.ArgumentParser()
parser.add_argument("--host", default="0.0.0.0", help="Host listen address")
parser.add_argument("--port", "-p", default=9050, help="Listen port", type=int)
parser.add_argument(
"--debug",
"-d",
default=False,
action="store_true",
help="Enable debug mode",
)
parser.add_argument(
"--log-level",
"-l",
default="INFO",
help="Log Level, empty string to disable.",
)
parser.add_argument(
"--twisted",
default=False,
action="store_true",
help="Use twisted to server requests.",
)
parser.add_argument(
"--gunicorn",
default=False,
action="store_true",
help="Use gunicorn to server requests.",
)
parser.add_argument(
"--threads", default=None, help="Number of threads to use.", type=int
)
parser.add_argument("--disable-embedded-logging",
default=False,
action="store_true",
help="Disable embedded logging configuration")
return parser | [
"def",
"get_argparser",
"(",
"parser",
"=",
"None",
")",
":",
"parser",
"=",
"parser",
"or",
"argparse",
".",
"ArgumentParser",
"(",
")",
"parser",
".",
"add_argument",
"(",
"\"--host\"",
",",
"default",
"=",
"\"0.0.0.0\"",
",",
"help",
"=",
"\"Host listen a... | Customize a parser to get the correct options. | [
"Customize",
"a",
"parser",
"to",
"get",
"the",
"correct",
"options",
"."
] | 9a274e534a2af5d2b2a5e99f10c59010adb94863 | https://github.com/criteo/gourde/blob/9a274e534a2af5d2b2a5e99f10c59010adb94863/gourde/gourde.py#L115-L152 |
def setup_prometheus(self, registry=None):
    """Setup Prometheus.

    :param registry: Optional Prometheus registry to collect into; the
        exporter's default registry is used when falsy.
    """
    kwargs = {"registry": registry} if registry else {}
    self.metrics = PrometheusMetrics(self.app, **kwargs)
    # Expose the application version; fall back to "unknown" when the app is
    # not an installed distribution.
    try:
        version = pkg_resources.require(self.app.name)[0].version
    except pkg_resources.DistributionNotFound:
        version = "unknown"
    self.metrics.info(
        "app_info", "Application info", version=version, appname=self.app.name
    )
    self.app.logger.info("Prometheus is enabled.")
kwargs = {}
if registry:
kwargs["registry"] = registry
self.metrics = PrometheusMetrics(self.app, **kwargs)
try:
version = pkg_resources.require(self.app.name)[0].version
except pkg_resources.DistributionNotFound:
version = "unknown"
self.metrics.info(
"app_info", "Application info", version=version, appname=self.app.name
)
self.app.logger.info("Prometheus is enabled.") | [
"def",
"setup_prometheus",
"(",
"self",
",",
"registry",
"=",
"None",
")",
":",
"kwargs",
"=",
"{",
"}",
"if",
"registry",
":",
"kwargs",
"[",
"\"registry\"",
"]",
"=",
"registry",
"self",
".",
"metrics",
"=",
"PrometheusMetrics",
"(",
"self",
".",
"app"... | Setup Prometheus. | [
"Setup",
"Prometheus",
"."
] | 9a274e534a2af5d2b2a5e99f10c59010adb94863 | https://github.com/criteo/gourde/blob/9a274e534a2af5d2b2a5e99f10c59010adb94863/gourde/gourde.py#L169-L182 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.