repo stringlengths 7 55 | path stringlengths 4 223 | func_name stringlengths 1 134 | original_string stringlengths 75 104k | language stringclasses 1 value | code stringlengths 75 104k | code_tokens listlengths 19 28.4k | docstring stringlengths 1 46.9k | docstring_tokens listlengths 1 1.97k | sha stringlengths 40 40 | url stringlengths 87 315 | partition stringclasses 1 value |
|---|---|---|---|---|---|---|---|---|---|---|---|
nyaruka/smartmin | smartmin/views.py | SmartFormMixin.get_success_url | def get_success_url(self):
"""
By default we use the referer that was stuffed in our
form when it was created
"""
if self.success_url:
# if our smart url references an object, pass that in
if self.success_url.find('@') > 0:
return smart_url(self.success_url, self.object)
else:
return smart_url(self.success_url, None)
elif 'loc' in self.form.cleaned_data:
return self.form.cleaned_data['loc']
raise ImproperlyConfigured("No redirect location found, override get_success_url to not use redirect urls") | python | def get_success_url(self):
"""
By default we use the referer that was stuffed in our
form when it was created
"""
if self.success_url:
# if our smart url references an object, pass that in
if self.success_url.find('@') > 0:
return smart_url(self.success_url, self.object)
else:
return smart_url(self.success_url, None)
elif 'loc' in self.form.cleaned_data:
return self.form.cleaned_data['loc']
raise ImproperlyConfigured("No redirect location found, override get_success_url to not use redirect urls") | [
"def",
"get_success_url",
"(",
"self",
")",
":",
"if",
"self",
".",
"success_url",
":",
"# if our smart url references an object, pass that in",
"if",
"self",
".",
"success_url",
".",
"find",
"(",
"'@'",
")",
">",
"0",
":",
"return",
"smart_url",
"(",
"self",
... | By default we use the referer that was stuffed in our
form when it was created | [
"By",
"default",
"we",
"use",
"the",
"referer",
"that",
"was",
"stuffed",
"in",
"our",
"form",
"when",
"it",
"was",
"created"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/views.py#L1001-L1016 | train |
nyaruka/smartmin | smartmin/views.py | SmartFormMixin.get_form_kwargs | def get_form_kwargs(self):
"""
We override this, using only those fields specified if they are specified.
Otherwise we include all fields in a standard ModelForm.
"""
kwargs = super(SmartFormMixin, self).get_form_kwargs()
kwargs['initial'] = self.derive_initial()
return kwargs | python | def get_form_kwargs(self):
"""
We override this, using only those fields specified if they are specified.
Otherwise we include all fields in a standard ModelForm.
"""
kwargs = super(SmartFormMixin, self).get_form_kwargs()
kwargs['initial'] = self.derive_initial()
return kwargs | [
"def",
"get_form_kwargs",
"(",
"self",
")",
":",
"kwargs",
"=",
"super",
"(",
"SmartFormMixin",
",",
"self",
")",
".",
"get_form_kwargs",
"(",
")",
"kwargs",
"[",
"'initial'",
"]",
"=",
"self",
".",
"derive_initial",
"(",
")",
"return",
"kwargs"
] | We override this, using only those fields specified if they are specified.
Otherwise we include all fields in a standard ModelForm. | [
"We",
"override",
"this",
"using",
"only",
"those",
"fields",
"specified",
"if",
"they",
"are",
"specified",
"."
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/views.py#L1024-L1032 | train |
nyaruka/smartmin | smartmin/views.py | SmartCreateView.derive_title | def derive_title(self):
"""
Derives our title from our object
"""
if not self.title:
return _("Create %s") % force_text(self.model._meta.verbose_name).title()
else:
return self.title | python | def derive_title(self):
"""
Derives our title from our object
"""
if not self.title:
return _("Create %s") % force_text(self.model._meta.verbose_name).title()
else:
return self.title | [
"def",
"derive_title",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"title",
":",
"return",
"_",
"(",
"\"Create %s\"",
")",
"%",
"force_text",
"(",
"self",
".",
"model",
".",
"_meta",
".",
"verbose_name",
")",
".",
"title",
"(",
")",
"else",
":",... | Derives our title from our object | [
"Derives",
"our",
"title",
"from",
"our",
"object"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/views.py#L1276-L1283 | train |
nyaruka/smartmin | smartmin/views.py | SmartCRUDL.permission_for_action | def permission_for_action(self, action):
"""
Returns the permission to use for the passed in action
"""
return "%s.%s_%s" % (self.app_name.lower(), self.model_name.lower(), action) | python | def permission_for_action(self, action):
"""
Returns the permission to use for the passed in action
"""
return "%s.%s_%s" % (self.app_name.lower(), self.model_name.lower(), action) | [
"def",
"permission_for_action",
"(",
"self",
",",
"action",
")",
":",
"return",
"\"%s.%s_%s\"",
"%",
"(",
"self",
".",
"app_name",
".",
"lower",
"(",
")",
",",
"self",
".",
"model_name",
".",
"lower",
"(",
")",
",",
"action",
")"
] | Returns the permission to use for the passed in action | [
"Returns",
"the",
"permission",
"to",
"use",
"for",
"the",
"passed",
"in",
"action"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/views.py#L1349-L1353 | train |
nyaruka/smartmin | smartmin/views.py | SmartCRUDL.template_for_action | def template_for_action(self, action):
"""
Returns the template to use for the passed in action
"""
return "%s/%s_%s.html" % (self.module_name.lower(), self.model_name.lower(), action) | python | def template_for_action(self, action):
"""
Returns the template to use for the passed in action
"""
return "%s/%s_%s.html" % (self.module_name.lower(), self.model_name.lower(), action) | [
"def",
"template_for_action",
"(",
"self",
",",
"action",
")",
":",
"return",
"\"%s/%s_%s.html\"",
"%",
"(",
"self",
".",
"module_name",
".",
"lower",
"(",
")",
",",
"self",
".",
"model_name",
".",
"lower",
"(",
")",
",",
"action",
")"
] | Returns the template to use for the passed in action | [
"Returns",
"the",
"template",
"to",
"use",
"for",
"the",
"passed",
"in",
"action"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/views.py#L1355-L1359 | train |
nyaruka/smartmin | smartmin/views.py | SmartCRUDL.url_name_for_action | def url_name_for_action(self, action):
"""
Returns the reverse name for this action
"""
return "%s.%s_%s" % (self.module_name.lower(), self.model_name.lower(), action) | python | def url_name_for_action(self, action):
"""
Returns the reverse name for this action
"""
return "%s.%s_%s" % (self.module_name.lower(), self.model_name.lower(), action) | [
"def",
"url_name_for_action",
"(",
"self",
",",
"action",
")",
":",
"return",
"\"%s.%s_%s\"",
"%",
"(",
"self",
".",
"module_name",
".",
"lower",
"(",
")",
",",
"self",
".",
"model_name",
".",
"lower",
"(",
")",
",",
"action",
")"
] | Returns the reverse name for this action | [
"Returns",
"the",
"reverse",
"name",
"for",
"this",
"action"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/views.py#L1361-L1365 | train |
nyaruka/smartmin | smartmin/views.py | SmartCRUDL.view_for_action | def view_for_action(self, action):
"""
Returns the appropriate view class for the passed in action
"""
# this turns replace_foo into ReplaceFoo and read into Read
class_name = "".join([word.capitalize() for word in action.split("_")])
view = None
# see if we have a custom class defined for this action
if hasattr(self, class_name):
# return that one
view = getattr(self, class_name)
# no model set? set it ourselves
if not getattr(view, 'model', None):
view.model = self.model
# no permission and we are supposed to set them, do so
if not hasattr(view, 'permission') and self.permissions:
view.permission = self.permission_for_action(action)
# set our link URL based on read and update
if not getattr(view, 'link_url', None):
if 'read' in self.actions:
view.link_url = 'id@%s' % self.url_name_for_action('read')
elif 'update' in self.actions:
view.link_url = 'id@%s' % self.url_name_for_action('update')
# if we can't infer a link URL then view class must override lookup_field_link
if not getattr(view, 'link_url', None) and 'lookup_field_link' not in view.__dict__:
view.link_fields = ()
# set add_button based on existence of Create view if add_button not explicitly set
if action == 'list' and getattr(view, 'add_button', None) is None:
view.add_button = 'create' in self.actions
# set edit_button based on existence of Update view if edit_button not explicitly set
if action == 'read' and getattr(view, 'edit_button', None) is None:
view.edit_button = 'update' in self.actions
# if update or create, set success url if not set
if not getattr(view, 'success_url', None) and (action == 'update' or action == 'create'):
view.success_url = '@%s' % self.url_name_for_action('list')
# otherwise, use our defaults
else:
options = dict(model=self.model)
# if this is an update or create, and we have a list view, then set the default to that
if action == 'update' or action == 'create' and 'list' in self.actions:
options['success_url'] = '@%s' % self.url_name_for_action('list')
# set permissions if appropriate
if self.permissions:
options['permission'] = self.permission_for_action(action)
if action == 'create':
view = type(str("%sCreateView" % self.model_name), (SmartCreateView,), options)
elif action == 'read':
if 'update' in self.actions:
options['edit_button'] = True
view = type(str("%sReadView" % self.model_name), (SmartReadView,), options)
elif action == 'update':
if 'delete' in self.actions:
options['delete_url'] = 'id@%s' % self.url_name_for_action('delete')
view = type(str("%sUpdateView" % self.model_name), (SmartUpdateView,), options)
elif action == 'delete':
if 'list' in self.actions:
options['cancel_url'] = '@%s' % self.url_name_for_action('list')
options['redirect_url'] = '@%s' % self.url_name_for_action('list')
elif 'update' in self.actions:
options['cancel_url'] = '@%s' % self.url_name_for_action('update')
view = type(str("%sDeleteView" % self.model_name), (SmartDeleteView,), options)
elif action == 'list':
if 'read' in self.actions:
options['link_url'] = 'id@%s' % self.url_name_for_action('read')
elif 'update' in self.actions:
options['link_url'] = 'id@%s' % self.url_name_for_action('update')
else:
options['link_fields'] = ()
if 'create' in self.actions:
options['add_button'] = True
view = type(str("%sListView" % self.model_name), (SmartListView,), options)
elif action == 'csv_import':
options['model'] = ImportTask
view = type(str("%sCSVImportView" % self.model_name), (SmartCSVImportView,), options)
if not view:
# couldn't find a view? blow up
raise Exception("No view found for action: %s" % action)
# set the url name for this view
view.url_name = self.url_name_for_action(action)
# no template set for it? set one based on our action and app name
if not getattr(view, 'template_name', None):
view.template_name = self.template_for_action(action)
view.crudl = self
return view | python | def view_for_action(self, action):
"""
Returns the appropriate view class for the passed in action
"""
# this turns replace_foo into ReplaceFoo and read into Read
class_name = "".join([word.capitalize() for word in action.split("_")])
view = None
# see if we have a custom class defined for this action
if hasattr(self, class_name):
# return that one
view = getattr(self, class_name)
# no model set? set it ourselves
if not getattr(view, 'model', None):
view.model = self.model
# no permission and we are supposed to set them, do so
if not hasattr(view, 'permission') and self.permissions:
view.permission = self.permission_for_action(action)
# set our link URL based on read and update
if not getattr(view, 'link_url', None):
if 'read' in self.actions:
view.link_url = 'id@%s' % self.url_name_for_action('read')
elif 'update' in self.actions:
view.link_url = 'id@%s' % self.url_name_for_action('update')
# if we can't infer a link URL then view class must override lookup_field_link
if not getattr(view, 'link_url', None) and 'lookup_field_link' not in view.__dict__:
view.link_fields = ()
# set add_button based on existence of Create view if add_button not explicitly set
if action == 'list' and getattr(view, 'add_button', None) is None:
view.add_button = 'create' in self.actions
# set edit_button based on existence of Update view if edit_button not explicitly set
if action == 'read' and getattr(view, 'edit_button', None) is None:
view.edit_button = 'update' in self.actions
# if update or create, set success url if not set
if not getattr(view, 'success_url', None) and (action == 'update' or action == 'create'):
view.success_url = '@%s' % self.url_name_for_action('list')
# otherwise, use our defaults
else:
options = dict(model=self.model)
# if this is an update or create, and we have a list view, then set the default to that
if action == 'update' or action == 'create' and 'list' in self.actions:
options['success_url'] = '@%s' % self.url_name_for_action('list')
# set permissions if appropriate
if self.permissions:
options['permission'] = self.permission_for_action(action)
if action == 'create':
view = type(str("%sCreateView" % self.model_name), (SmartCreateView,), options)
elif action == 'read':
if 'update' in self.actions:
options['edit_button'] = True
view = type(str("%sReadView" % self.model_name), (SmartReadView,), options)
elif action == 'update':
if 'delete' in self.actions:
options['delete_url'] = 'id@%s' % self.url_name_for_action('delete')
view = type(str("%sUpdateView" % self.model_name), (SmartUpdateView,), options)
elif action == 'delete':
if 'list' in self.actions:
options['cancel_url'] = '@%s' % self.url_name_for_action('list')
options['redirect_url'] = '@%s' % self.url_name_for_action('list')
elif 'update' in self.actions:
options['cancel_url'] = '@%s' % self.url_name_for_action('update')
view = type(str("%sDeleteView" % self.model_name), (SmartDeleteView,), options)
elif action == 'list':
if 'read' in self.actions:
options['link_url'] = 'id@%s' % self.url_name_for_action('read')
elif 'update' in self.actions:
options['link_url'] = 'id@%s' % self.url_name_for_action('update')
else:
options['link_fields'] = ()
if 'create' in self.actions:
options['add_button'] = True
view = type(str("%sListView" % self.model_name), (SmartListView,), options)
elif action == 'csv_import':
options['model'] = ImportTask
view = type(str("%sCSVImportView" % self.model_name), (SmartCSVImportView,), options)
if not view:
# couldn't find a view? blow up
raise Exception("No view found for action: %s" % action)
# set the url name for this view
view.url_name = self.url_name_for_action(action)
# no template set for it? set one based on our action and app name
if not getattr(view, 'template_name', None):
view.template_name = self.template_for_action(action)
view.crudl = self
return view | [
"def",
"view_for_action",
"(",
"self",
",",
"action",
")",
":",
"# this turns replace_foo into ReplaceFoo and read into Read",
"class_name",
"=",
"\"\"",
".",
"join",
"(",
"[",
"word",
".",
"capitalize",
"(",
")",
"for",
"word",
"in",
"action",
".",
"split",
"("... | Returns the appropriate view class for the passed in action | [
"Returns",
"the",
"appropriate",
"view",
"class",
"for",
"the",
"passed",
"in",
"action"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/views.py#L1367-L1478 | train |
nyaruka/smartmin | smartmin/views.py | SmartCRUDL.pattern_for_view | def pattern_for_view(self, view, action):
"""
Returns the URL pattern for the passed in action.
"""
# if this view knows how to define a URL pattern, call that
if getattr(view, 'derive_url_pattern', None):
return view.derive_url_pattern(self.path, action)
# otherwise take our best guess
else:
return r'^%s/%s/$' % (self.path, action) | python | def pattern_for_view(self, view, action):
"""
Returns the URL pattern for the passed in action.
"""
# if this view knows how to define a URL pattern, call that
if getattr(view, 'derive_url_pattern', None):
return view.derive_url_pattern(self.path, action)
# otherwise take our best guess
else:
return r'^%s/%s/$' % (self.path, action) | [
"def",
"pattern_for_view",
"(",
"self",
",",
"view",
",",
"action",
")",
":",
"# if this view knows how to define a URL pattern, call that",
"if",
"getattr",
"(",
"view",
",",
"'derive_url_pattern'",
",",
"None",
")",
":",
"return",
"view",
".",
"derive_url_pattern",
... | Returns the URL pattern for the passed in action. | [
"Returns",
"the",
"URL",
"pattern",
"for",
"the",
"passed",
"in",
"action",
"."
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/views.py#L1480-L1490 | train |
nyaruka/smartmin | smartmin/views.py | SmartCRUDL.as_urlpatterns | def as_urlpatterns(self):
"""
Creates the appropriate URLs for this object.
"""
urls = []
# for each of our actions
for action in self.actions:
view_class = self.view_for_action(action)
view_pattern = self.pattern_for_view(view_class, action)
name = self.url_name_for_action(action)
urls.append(url(view_pattern, view_class.as_view(), name=name))
return urls | python | def as_urlpatterns(self):
"""
Creates the appropriate URLs for this object.
"""
urls = []
# for each of our actions
for action in self.actions:
view_class = self.view_for_action(action)
view_pattern = self.pattern_for_view(view_class, action)
name = self.url_name_for_action(action)
urls.append(url(view_pattern, view_class.as_view(), name=name))
return urls | [
"def",
"as_urlpatterns",
"(",
"self",
")",
":",
"urls",
"=",
"[",
"]",
"# for each of our actions",
"for",
"action",
"in",
"self",
".",
"actions",
":",
"view_class",
"=",
"self",
".",
"view_for_action",
"(",
"action",
")",
"view_pattern",
"=",
"self",
".",
... | Creates the appropriate URLs for this object. | [
"Creates",
"the",
"appropriate",
"URLs",
"for",
"this",
"object",
"."
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/views.py#L1492-L1505 | train |
nyaruka/smartmin | smartmin/management/commands/collect_sql.py | Command.load_migrations | def load_migrations(self): # pragma: no cover
"""
Loads all migrations in the order they would be applied to a clean database
"""
executor = MigrationExecutor(connection=None)
# create the forwards plan Django would follow on an empty database
plan = executor.migration_plan(executor.loader.graph.leaf_nodes(), clean_start=True)
if self.verbosity >= 2:
for migration, _ in plan:
self.stdout.write(" > %s" % migration)
return [m[0] for m in plan] | python | def load_migrations(self): # pragma: no cover
"""
Loads all migrations in the order they would be applied to a clean database
"""
executor = MigrationExecutor(connection=None)
# create the forwards plan Django would follow on an empty database
plan = executor.migration_plan(executor.loader.graph.leaf_nodes(), clean_start=True)
if self.verbosity >= 2:
for migration, _ in plan:
self.stdout.write(" > %s" % migration)
return [m[0] for m in plan] | [
"def",
"load_migrations",
"(",
"self",
")",
":",
"# pragma: no cover",
"executor",
"=",
"MigrationExecutor",
"(",
"connection",
"=",
"None",
")",
"# create the forwards plan Django would follow on an empty database",
"plan",
"=",
"executor",
".",
"migration_plan",
"(",
"e... | Loads all migrations in the order they would be applied to a clean database | [
"Loads",
"all",
"migrations",
"in",
"the",
"order",
"they",
"would",
"be",
"applied",
"to",
"a",
"clean",
"database"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/management/commands/collect_sql.py#L120-L133 | train |
nyaruka/smartmin | smartmin/management/commands/collect_sql.py | Command.extract_operations | def extract_operations(self, migrations):
"""
Extract SQL operations from the given migrations
"""
operations = []
for migration in migrations:
for operation in migration.operations:
if isinstance(operation, RunSQL):
statements = sqlparse.parse(dedent(operation.sql))
for statement in statements:
operation = SqlObjectOperation.parse(statement)
if operation:
operations.append(operation)
if self.verbosity >= 2:
self.stdout.write(" > % -100s (%s)" % (operation, migration))
return operations | python | def extract_operations(self, migrations):
"""
Extract SQL operations from the given migrations
"""
operations = []
for migration in migrations:
for operation in migration.operations:
if isinstance(operation, RunSQL):
statements = sqlparse.parse(dedent(operation.sql))
for statement in statements:
operation = SqlObjectOperation.parse(statement)
if operation:
operations.append(operation)
if self.verbosity >= 2:
self.stdout.write(" > % -100s (%s)" % (operation, migration))
return operations | [
"def",
"extract_operations",
"(",
"self",
",",
"migrations",
")",
":",
"operations",
"=",
"[",
"]",
"for",
"migration",
"in",
"migrations",
":",
"for",
"operation",
"in",
"migration",
".",
"operations",
":",
"if",
"isinstance",
"(",
"operation",
",",
"RunSQL... | Extract SQL operations from the given migrations | [
"Extract",
"SQL",
"operations",
"from",
"the",
"given",
"migrations"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/management/commands/collect_sql.py#L135-L154 | train |
nyaruka/smartmin | smartmin/management/commands/collect_sql.py | Command.normalize_operations | def normalize_operations(self, operations):
"""
Removes redundant SQL operations - e.g. a CREATE X followed by a DROP X
"""
normalized = OrderedDict()
for operation in operations:
op_key = (operation.sql_type, operation.obj_name)
# do we already have an operation for this object?
if op_key in normalized:
if self.verbosity >= 2:
self.stdout.write(" < %s" % normalized[op_key])
del normalized[op_key]
# don't add DROP operations for objects not previously created
if operation.is_create:
normalized[op_key] = operation
elif self.verbosity >= 2:
self.stdout.write(" < %s" % operation)
return normalized.values() | python | def normalize_operations(self, operations):
"""
Removes redundant SQL operations - e.g. a CREATE X followed by a DROP X
"""
normalized = OrderedDict()
for operation in operations:
op_key = (operation.sql_type, operation.obj_name)
# do we already have an operation for this object?
if op_key in normalized:
if self.verbosity >= 2:
self.stdout.write(" < %s" % normalized[op_key])
del normalized[op_key]
# don't add DROP operations for objects not previously created
if operation.is_create:
normalized[op_key] = operation
elif self.verbosity >= 2:
self.stdout.write(" < %s" % operation)
return normalized.values() | [
"def",
"normalize_operations",
"(",
"self",
",",
"operations",
")",
":",
"normalized",
"=",
"OrderedDict",
"(",
")",
"for",
"operation",
"in",
"operations",
":",
"op_key",
"=",
"(",
"operation",
".",
"sql_type",
",",
"operation",
".",
"obj_name",
")",
"# do ... | Removes redundant SQL operations - e.g. a CREATE X followed by a DROP X | [
"Removes",
"redundant",
"SQL",
"operations",
"-",
"e",
".",
"g",
".",
"a",
"CREATE",
"X",
"followed",
"by",
"a",
"DROP",
"X"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/management/commands/collect_sql.py#L156-L178 | train |
nyaruka/smartmin | smartmin/management/commands/collect_sql.py | Command.write_type_dumps | def write_type_dumps(self, operations, preserve_order, output_dir):
"""
Splits the list of SQL operations by type and dumps these to separate files
"""
by_type = {SqlType.INDEX: [], SqlType.FUNCTION: [], SqlType.TRIGGER: []}
for operation in operations:
by_type[operation.sql_type].append(operation)
# optionally sort each operation list by the object name
if not preserve_order:
for obj_type, ops in by_type.items():
by_type[obj_type] = sorted(ops, key=lambda o: o.obj_name)
if by_type[SqlType.INDEX]:
self.write_dump('indexes', by_type[SqlType.INDEX], output_dir)
if by_type[SqlType.FUNCTION]:
self.write_dump('functions', by_type[SqlType.FUNCTION], output_dir)
if by_type[SqlType.TRIGGER]:
self.write_dump('triggers', by_type[SqlType.TRIGGER], output_dir) | python | def write_type_dumps(self, operations, preserve_order, output_dir):
"""
Splits the list of SQL operations by type and dumps these to separate files
"""
by_type = {SqlType.INDEX: [], SqlType.FUNCTION: [], SqlType.TRIGGER: []}
for operation in operations:
by_type[operation.sql_type].append(operation)
# optionally sort each operation list by the object name
if not preserve_order:
for obj_type, ops in by_type.items():
by_type[obj_type] = sorted(ops, key=lambda o: o.obj_name)
if by_type[SqlType.INDEX]:
self.write_dump('indexes', by_type[SqlType.INDEX], output_dir)
if by_type[SqlType.FUNCTION]:
self.write_dump('functions', by_type[SqlType.FUNCTION], output_dir)
if by_type[SqlType.TRIGGER]:
self.write_dump('triggers', by_type[SqlType.TRIGGER], output_dir) | [
"def",
"write_type_dumps",
"(",
"self",
",",
"operations",
",",
"preserve_order",
",",
"output_dir",
")",
":",
"by_type",
"=",
"{",
"SqlType",
".",
"INDEX",
":",
"[",
"]",
",",
"SqlType",
".",
"FUNCTION",
":",
"[",
"]",
",",
"SqlType",
".",
"TRIGGER",
... | Splits the list of SQL operations by type and dumps these to separate files | [
"Splits",
"the",
"list",
"of",
"SQL",
"operations",
"by",
"type",
"and",
"dumps",
"these",
"to",
"separate",
"files"
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/management/commands/collect_sql.py#L180-L198 | train |
nyaruka/smartmin | smartmin/widgets.py | VisibleHiddenWidget.render | def render(self, name, value, attrs=None, renderer=None):
"""
Returns this Widget rendered as HTML, as a Unicode string.
The 'value' given is not guaranteed to be valid input, so subclass
implementations should program defensively.
"""
html = ''
html += '%s' % value
html += '<input type="hidden" name="%s" value="%s">' % (escape(name), escape(value))
return mark_safe(html) | python | def render(self, name, value, attrs=None, renderer=None):
"""
Returns this Widget rendered as HTML, as a Unicode string.
The 'value' given is not guaranteed to be valid input, so subclass
implementations should program defensively.
"""
html = ''
html += '%s' % value
html += '<input type="hidden" name="%s" value="%s">' % (escape(name), escape(value))
return mark_safe(html) | [
"def",
"render",
"(",
"self",
",",
"name",
",",
"value",
",",
"attrs",
"=",
"None",
",",
"renderer",
"=",
"None",
")",
":",
"html",
"=",
"''",
"html",
"+=",
"'%s'",
"%",
"value",
"html",
"+=",
"'<input type=\"hidden\" name=\"%s\" value=\"%s\">'",
"%",
"(",... | Returns this Widget rendered as HTML, as a Unicode string.
The 'value' given is not guaranteed to be valid input, so subclass
implementations should program defensively. | [
"Returns",
"this",
"Widget",
"rendered",
"as",
"HTML",
"as",
"a",
"Unicode",
"string",
"."
] | 488a676a4960555e4d216a7b95d6e01a4ad4efd8 | https://github.com/nyaruka/smartmin/blob/488a676a4960555e4d216a7b95d6e01a4ad4efd8/smartmin/widgets.py#L9-L19 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | add_atom_data | def add_atom_data(data_api, data_setters, atom_names, element_names, atom_charges, group_atom_ind):
"""Add the atomic data to the DataTransferInterface.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to
:param atom_nams the list of atom names for the group
:param element_names the list of element names for this group
:param atom_charges the list formal atomic charges for this group
:param group_atom_ind the index of this atom in the group"""
atom_name = atom_names[group_atom_ind]
element = element_names[group_atom_ind]
charge = atom_charges[group_atom_ind]
alternative_location_id = data_api.alt_loc_list[data_api.atom_counter]
serial_number = data_api.atom_id_list[data_api.atom_counter]
x = data_api.x_coord_list[data_api.atom_counter]
y = data_api.y_coord_list[data_api.atom_counter]
z = data_api.z_coord_list[data_api.atom_counter]
occupancy = data_api.occupancy_list[data_api.atom_counter]
temperature_factor = data_api.b_factor_list[data_api.atom_counter]
data_setters.set_atom_info(atom_name, serial_number, alternative_location_id,
x, y, z, occupancy, temperature_factor, element, charge) | python | def add_atom_data(data_api, data_setters, atom_names, element_names, atom_charges, group_atom_ind):
"""Add the atomic data to the DataTransferInterface.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to
:param atom_nams the list of atom names for the group
:param element_names the list of element names for this group
:param atom_charges the list formal atomic charges for this group
:param group_atom_ind the index of this atom in the group"""
atom_name = atom_names[group_atom_ind]
element = element_names[group_atom_ind]
charge = atom_charges[group_atom_ind]
alternative_location_id = data_api.alt_loc_list[data_api.atom_counter]
serial_number = data_api.atom_id_list[data_api.atom_counter]
x = data_api.x_coord_list[data_api.atom_counter]
y = data_api.y_coord_list[data_api.atom_counter]
z = data_api.z_coord_list[data_api.atom_counter]
occupancy = data_api.occupancy_list[data_api.atom_counter]
temperature_factor = data_api.b_factor_list[data_api.atom_counter]
data_setters.set_atom_info(atom_name, serial_number, alternative_location_id,
x, y, z, occupancy, temperature_factor, element, charge) | [
"def",
"add_atom_data",
"(",
"data_api",
",",
"data_setters",
",",
"atom_names",
",",
"element_names",
",",
"atom_charges",
",",
"group_atom_ind",
")",
":",
"atom_name",
"=",
"atom_names",
"[",
"group_atom_ind",
"]",
"element",
"=",
"element_names",
"[",
"group_at... | Add the atomic data to the DataTransferInterface.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to
:param atom_nams the list of atom names for the group
:param element_names the list of element names for this group
:param atom_charges the list formal atomic charges for this group
:param group_atom_ind the index of this atom in the group | [
"Add",
"the",
"atomic",
"data",
"to",
"the",
"DataTransferInterface",
".",
":",
"param",
"data_api",
"the",
"data",
"api",
"from",
"where",
"to",
"get",
"the",
"data",
":",
"param",
"data_setters",
"the",
"class",
"to",
"push",
"the",
"data",
"to",
":",
... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L4-L23 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | add_group_bonds | def add_group_bonds(data_setters, bond_indices, bond_orders):
"""Add the bonds for this group.
:param data_setters the class to push the data to
:param bond_indices the indices of the atoms in the group that
are bonded (in pairs)
:param bond_orders the orders of the bonds"""
for bond_index in range(len(bond_orders)):
data_setters.set_group_bond(bond_indices[bond_index*2],bond_indices[bond_index*2+1],bond_orders[bond_index]) | python | def add_group_bonds(data_setters, bond_indices, bond_orders):
"""Add the bonds for this group.
:param data_setters the class to push the data to
:param bond_indices the indices of the atoms in the group that
are bonded (in pairs)
:param bond_orders the orders of the bonds"""
for bond_index in range(len(bond_orders)):
data_setters.set_group_bond(bond_indices[bond_index*2],bond_indices[bond_index*2+1],bond_orders[bond_index]) | [
"def",
"add_group_bonds",
"(",
"data_setters",
",",
"bond_indices",
",",
"bond_orders",
")",
":",
"for",
"bond_index",
"in",
"range",
"(",
"len",
"(",
"bond_orders",
")",
")",
":",
"data_setters",
".",
"set_group_bond",
"(",
"bond_indices",
"[",
"bond_index",
... | Add the bonds for this group.
:param data_setters the class to push the data to
:param bond_indices the indices of the atoms in the group that
are bonded (in pairs)
:param bond_orders the orders of the bonds | [
"Add",
"the",
"bonds",
"for",
"this",
"group",
".",
":",
"param",
"data_setters",
"the",
"class",
"to",
"push",
"the",
"data",
"to",
":",
"param",
"bond_indices",
"the",
"indices",
"of",
"the",
"atoms",
"in",
"the",
"group",
"that",
"are",
"bonded",
"(",... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L26-L33 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | add_group | def add_group(data_api, data_setters, group_index):
"""Add the data for a whole group.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to
:param group_index the index for this group"""
group_type_ind = data_api.group_type_list[group_index]
atom_count = len(data_api.group_list[group_type_ind]["atomNameList"])
insertion_code = data_api.ins_code_list[group_index]
data_setters.set_group_info(data_api.group_list[group_type_ind]["groupName"],
data_api.group_id_list[group_index], insertion_code,
data_api.group_list[group_type_ind]["chemCompType"],
atom_count, data_api.num_bonds,
data_api.group_list[group_type_ind]["singleLetterCode"],
data_api.sequence_index_list[group_index],
data_api.sec_struct_list[group_index])
for group_atom_ind in range(atom_count):
add_atom_data(data_api, data_setters,
data_api.group_list[group_type_ind]["atomNameList"],
data_api.group_list[group_type_ind]["elementList"],
data_api.group_list[group_type_ind]["formalChargeList"],
group_atom_ind)
data_api.atom_counter +=1
add_group_bonds(data_setters,
data_api.group_list[group_type_ind]["bondAtomList"],
data_api.group_list[group_type_ind]["bondOrderList"])
return atom_count | python | def add_group(data_api, data_setters, group_index):
"""Add the data for a whole group.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to
:param group_index the index for this group"""
group_type_ind = data_api.group_type_list[group_index]
atom_count = len(data_api.group_list[group_type_ind]["atomNameList"])
insertion_code = data_api.ins_code_list[group_index]
data_setters.set_group_info(data_api.group_list[group_type_ind]["groupName"],
data_api.group_id_list[group_index], insertion_code,
data_api.group_list[group_type_ind]["chemCompType"],
atom_count, data_api.num_bonds,
data_api.group_list[group_type_ind]["singleLetterCode"],
data_api.sequence_index_list[group_index],
data_api.sec_struct_list[group_index])
for group_atom_ind in range(atom_count):
add_atom_data(data_api, data_setters,
data_api.group_list[group_type_ind]["atomNameList"],
data_api.group_list[group_type_ind]["elementList"],
data_api.group_list[group_type_ind]["formalChargeList"],
group_atom_ind)
data_api.atom_counter +=1
add_group_bonds(data_setters,
data_api.group_list[group_type_ind]["bondAtomList"],
data_api.group_list[group_type_ind]["bondOrderList"])
return atom_count | [
"def",
"add_group",
"(",
"data_api",
",",
"data_setters",
",",
"group_index",
")",
":",
"group_type_ind",
"=",
"data_api",
".",
"group_type_list",
"[",
"group_index",
"]",
"atom_count",
"=",
"len",
"(",
"data_api",
".",
"group_list",
"[",
"group_type_ind",
"]",
... | Add the data for a whole group.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to
:param group_index the index for this group | [
"Add",
"the",
"data",
"for",
"a",
"whole",
"group",
".",
":",
"param",
"data_api",
"the",
"data",
"api",
"from",
"where",
"to",
"get",
"the",
"data",
":",
"param",
"data_setters",
"the",
"class",
"to",
"push",
"the",
"data",
"to",
":",
"param",
"group_... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L36-L61 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | add_chain_info | def add_chain_info(data_api, data_setters, chain_index):
"""Add the data for a whole chain.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to
:param chain_index the index for this chain"""
chain_id = data_api.chain_id_list[chain_index]
chain_name = data_api.chain_name_list[chain_index]
num_groups = data_api.groups_per_chain[chain_index]
data_setters.set_chain_info(chain_id, chain_name, num_groups)
next_ind = data_api.group_counter + num_groups
last_ind = data_api.group_counter
for group_ind in range(last_ind, next_ind):
add_group(data_api, data_setters, group_ind)
data_api.group_counter +=1
data_api.chain_counter+=1 | python | def add_chain_info(data_api, data_setters, chain_index):
"""Add the data for a whole chain.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to
:param chain_index the index for this chain"""
chain_id = data_api.chain_id_list[chain_index]
chain_name = data_api.chain_name_list[chain_index]
num_groups = data_api.groups_per_chain[chain_index]
data_setters.set_chain_info(chain_id, chain_name, num_groups)
next_ind = data_api.group_counter + num_groups
last_ind = data_api.group_counter
for group_ind in range(last_ind, next_ind):
add_group(data_api, data_setters, group_ind)
data_api.group_counter +=1
data_api.chain_counter+=1 | [
"def",
"add_chain_info",
"(",
"data_api",
",",
"data_setters",
",",
"chain_index",
")",
":",
"chain_id",
"=",
"data_api",
".",
"chain_id_list",
"[",
"chain_index",
"]",
"chain_name",
"=",
"data_api",
".",
"chain_name_list",
"[",
"chain_index",
"]",
"num_groups",
... | Add the data for a whole chain.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to
:param chain_index the index for this chain | [
"Add",
"the",
"data",
"for",
"a",
"whole",
"chain",
".",
":",
"param",
"data_api",
"the",
"data",
"api",
"from",
"where",
"to",
"get",
"the",
"data",
":",
"param",
"data_setters",
"the",
"class",
"to",
"push",
"the",
"data",
"to",
":",
"param",
"chain_... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L64-L78 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | add_atomic_information | def add_atomic_information(data_api, data_setters):
"""Add all the structural information.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to"""
for model_chains in data_api.chains_per_model:
data_setters.set_model_info(data_api.model_counter, model_chains)
tot_chains_this_model = data_api.chain_counter + model_chains
last_chain_counter = data_api.chain_counter
for chain_index in range(last_chain_counter, tot_chains_this_model):
add_chain_info(data_api, data_setters, chain_index)
data_api.model_counter+=1 | python | def add_atomic_information(data_api, data_setters):
"""Add all the structural information.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to"""
for model_chains in data_api.chains_per_model:
data_setters.set_model_info(data_api.model_counter, model_chains)
tot_chains_this_model = data_api.chain_counter + model_chains
last_chain_counter = data_api.chain_counter
for chain_index in range(last_chain_counter, tot_chains_this_model):
add_chain_info(data_api, data_setters, chain_index)
data_api.model_counter+=1 | [
"def",
"add_atomic_information",
"(",
"data_api",
",",
"data_setters",
")",
":",
"for",
"model_chains",
"in",
"data_api",
".",
"chains_per_model",
":",
"data_setters",
".",
"set_model_info",
"(",
"data_api",
".",
"model_counter",
",",
"model_chains",
")",
"tot_chain... | Add all the structural information.
:param data_api the data api from where to get the data
:param data_setters the class to push the data to | [
"Add",
"all",
"the",
"structural",
"information",
".",
":",
"param",
"data_api",
"the",
"data",
"api",
"from",
"where",
"to",
"get",
"the",
"data",
":",
"param",
"data_setters",
"the",
"class",
"to",
"push",
"the",
"data",
"to"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L81-L91 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | generate_bio_assembly | def generate_bio_assembly(data_api, struct_inflator):
"""Generate the bioassembly data.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object"""
bioassembly_count = 0
for bioassembly in data_api.bio_assembly:
bioassembly_count += 1
for transform in bioassembly["transformList"]:
struct_inflator.set_bio_assembly_trans(bioassembly_count,
transform["chainIndexList"],
transform["matrix"]) | python | def generate_bio_assembly(data_api, struct_inflator):
"""Generate the bioassembly data.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object"""
bioassembly_count = 0
for bioassembly in data_api.bio_assembly:
bioassembly_count += 1
for transform in bioassembly["transformList"]:
struct_inflator.set_bio_assembly_trans(bioassembly_count,
transform["chainIndexList"],
transform["matrix"]) | [
"def",
"generate_bio_assembly",
"(",
"data_api",
",",
"struct_inflator",
")",
":",
"bioassembly_count",
"=",
"0",
"for",
"bioassembly",
"in",
"data_api",
".",
"bio_assembly",
":",
"bioassembly_count",
"+=",
"1",
"for",
"transform",
"in",
"bioassembly",
"[",
"\"tra... | Generate the bioassembly data.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object | [
"Generate",
"the",
"bioassembly",
"data",
".",
":",
"param",
"data_api",
"the",
"interface",
"to",
"the",
"decoded",
"data",
":",
"param",
"struct_inflator",
"the",
"interface",
"to",
"put",
"the",
"data",
"into",
"the",
"client",
"object"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L94-L104 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | add_inter_group_bonds | def add_inter_group_bonds(data_api, struct_inflator):
""" Generate inter group bonds.
Bond indices are specified within the whole structure and start at 0.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object"""
for i in range(len(data_api.bond_order_list)):
struct_inflator.set_inter_group_bond(data_api.bond_atom_list[i * 2],
data_api.bond_atom_list[i * 2 + 1],
data_api.bond_order_list[i]) | python | def add_inter_group_bonds(data_api, struct_inflator):
""" Generate inter group bonds.
Bond indices are specified within the whole structure and start at 0.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object"""
for i in range(len(data_api.bond_order_list)):
struct_inflator.set_inter_group_bond(data_api.bond_atom_list[i * 2],
data_api.bond_atom_list[i * 2 + 1],
data_api.bond_order_list[i]) | [
"def",
"add_inter_group_bonds",
"(",
"data_api",
",",
"struct_inflator",
")",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"data_api",
".",
"bond_order_list",
")",
")",
":",
"struct_inflator",
".",
"set_inter_group_bond",
"(",
"data_api",
".",
"bond_atom_lis... | Generate inter group bonds.
Bond indices are specified within the whole structure and start at 0.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object | [
"Generate",
"inter",
"group",
"bonds",
".",
"Bond",
"indices",
"are",
"specified",
"within",
"the",
"whole",
"structure",
"and",
"start",
"at",
"0",
".",
":",
"param",
"data_api",
"the",
"interface",
"to",
"the",
"decoded",
"data",
":",
"param",
"struct_infl... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L106-L114 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | add_header_info | def add_header_info(data_api, struct_inflator):
""" Add ancilliary header information to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object
"""
struct_inflator.set_header_info(data_api.r_free,
data_api.r_work,
data_api.resolution,
data_api.title,
data_api.deposition_date,
data_api.release_date,
data_api.experimental_methods) | python | def add_header_info(data_api, struct_inflator):
""" Add ancilliary header information to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object
"""
struct_inflator.set_header_info(data_api.r_free,
data_api.r_work,
data_api.resolution,
data_api.title,
data_api.deposition_date,
data_api.release_date,
data_api.experimental_methods) | [
"def",
"add_header_info",
"(",
"data_api",
",",
"struct_inflator",
")",
":",
"struct_inflator",
".",
"set_header_info",
"(",
"data_api",
".",
"r_free",
",",
"data_api",
".",
"r_work",
",",
"data_api",
".",
"resolution",
",",
"data_api",
".",
"title",
",",
"dat... | Add ancilliary header information to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object | [
"Add",
"ancilliary",
"header",
"information",
"to",
"the",
"structure",
".",
":",
"param",
"data_api",
"the",
"interface",
"to",
"the",
"decoded",
"data",
":",
"param",
"struct_inflator",
"the",
"interface",
"to",
"put",
"the",
"data",
"into",
"the",
"client",... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L118-L129 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | add_xtalographic_info | def add_xtalographic_info(data_api, struct_inflator):
""" Add the crystallographic data to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object"""
if data_api.unit_cell == None and data_api.space_group is not None:
struct_inflator.set_xtal_info(data_api.space_group,
constants.UNKNOWN_UNIT_CELL)
elif data_api.unit_cell is not None and data_api.space_group is None:
struct_inflator.set_xtal_info(constants.UNKNOWN_SPACE_GROUP,
data_api.unit_cell)
elif data_api.unit_cell is None and data_api.space_group is None:
struct_inflator.set_xtal_info(constants.UNKNOWN_SPACE_GROUP,
constants.UNKNOWN_UNIT_CELL)
else:
struct_inflator.set_xtal_info(data_api.space_group,
data_api.unit_cell) | python | def add_xtalographic_info(data_api, struct_inflator):
""" Add the crystallographic data to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object"""
if data_api.unit_cell == None and data_api.space_group is not None:
struct_inflator.set_xtal_info(data_api.space_group,
constants.UNKNOWN_UNIT_CELL)
elif data_api.unit_cell is not None and data_api.space_group is None:
struct_inflator.set_xtal_info(constants.UNKNOWN_SPACE_GROUP,
data_api.unit_cell)
elif data_api.unit_cell is None and data_api.space_group is None:
struct_inflator.set_xtal_info(constants.UNKNOWN_SPACE_GROUP,
constants.UNKNOWN_UNIT_CELL)
else:
struct_inflator.set_xtal_info(data_api.space_group,
data_api.unit_cell) | [
"def",
"add_xtalographic_info",
"(",
"data_api",
",",
"struct_inflator",
")",
":",
"if",
"data_api",
".",
"unit_cell",
"==",
"None",
"and",
"data_api",
".",
"space_group",
"is",
"not",
"None",
":",
"struct_inflator",
".",
"set_xtal_info",
"(",
"data_api",
".",
... | Add the crystallographic data to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object | [
"Add",
"the",
"crystallographic",
"data",
"to",
"the",
"structure",
".",
":",
"param",
"data_api",
"the",
"interface",
"to",
"the",
"decoded",
"data",
":",
"param",
"struct_inflator",
"the",
"interface",
"to",
"put",
"the",
"data",
"into",
"the",
"client",
"... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L133-L148 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | add_entity_info | def add_entity_info( data_api, struct_inflator):
"""Add the entity info to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object
"""
for entity in data_api.entity_list:
struct_inflator.set_entity_info(entity["chainIndexList"],
entity["sequence"],
entity["description"],
entity["type"]) | python | def add_entity_info( data_api, struct_inflator):
"""Add the entity info to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object
"""
for entity in data_api.entity_list:
struct_inflator.set_entity_info(entity["chainIndexList"],
entity["sequence"],
entity["description"],
entity["type"]) | [
"def",
"add_entity_info",
"(",
"data_api",
",",
"struct_inflator",
")",
":",
"for",
"entity",
"in",
"data_api",
".",
"entity_list",
":",
"struct_inflator",
".",
"set_entity_info",
"(",
"entity",
"[",
"\"chainIndexList\"",
"]",
",",
"entity",
"[",
"\"sequence\"",
... | Add the entity info to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object | [
"Add",
"the",
"entity",
"info",
"to",
"the",
"structure",
".",
":",
"param",
"data_api",
"the",
"interface",
"to",
"the",
"decoded",
"data",
":",
"param",
"struct_inflator",
"the",
"interface",
"to",
"put",
"the",
"data",
"into",
"the",
"client",
"object"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L150-L159 | train |
rcsb/mmtf-python | mmtf/utils/decoder_utils.py | get_bonds | def get_bonds(input_group):
"""Utility function to get indices (in pairs) of the bonds."""
out_list = []
for i in range(len(input_group.bond_order_list)):
out_list.append((input_group.bond_atom_list[i * 2], input_group.bond_atom_list[i * 2 + 1],))
return out_list | python | def get_bonds(input_group):
"""Utility function to get indices (in pairs) of the bonds."""
out_list = []
for i in range(len(input_group.bond_order_list)):
out_list.append((input_group.bond_atom_list[i * 2], input_group.bond_atom_list[i * 2 + 1],))
return out_list | [
"def",
"get_bonds",
"(",
"input_group",
")",
":",
"out_list",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"input_group",
".",
"bond_order_list",
")",
")",
":",
"out_list",
".",
"append",
"(",
"(",
"input_group",
".",
"bond_atom_list",
"[",... | Utility function to get indices (in pairs) of the bonds. | [
"Utility",
"function",
"to",
"get",
"indices",
"(",
"in",
"pairs",
")",
"of",
"the",
"bonds",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/decoder_utils.py#L162-L167 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | get_unique_groups | def get_unique_groups(input_list):
"""Function to get a unique list of groups."""
out_list = []
for item in input_list:
if item not in out_list:
out_list.append(item)
return out_list | python | def get_unique_groups(input_list):
"""Function to get a unique list of groups."""
out_list = []
for item in input_list:
if item not in out_list:
out_list.append(item)
return out_list | [
"def",
"get_unique_groups",
"(",
"input_list",
")",
":",
"out_list",
"=",
"[",
"]",
"for",
"item",
"in",
"input_list",
":",
"if",
"item",
"not",
"in",
"out_list",
":",
"out_list",
".",
"append",
"(",
"item",
")",
"return",
"out_list"
] | Function to get a unique list of groups. | [
"Function",
"to",
"get",
"a",
"unique",
"list",
"of",
"groups",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L59-L65 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | Group.convert_to_dict | def convert_to_dict(self):
"""Convert the group object to an appropriate DICT"""
out_dict = {}
out_dict["groupName"] = self.group_name
out_dict["atomNameList"] = self.atom_name_list
out_dict["elementList"] = self.element_list
out_dict["bondOrderList"] = self.bond_order_list
out_dict["bondAtomList"] = self.bond_atom_list
out_dict["formalChargeList"] = self.charge_list
out_dict["singleLetterCode"] = self.single_letter_code
out_dict["chemCompType"] = self.group_type
return out_dict | python | def convert_to_dict(self):
"""Convert the group object to an appropriate DICT"""
out_dict = {}
out_dict["groupName"] = self.group_name
out_dict["atomNameList"] = self.atom_name_list
out_dict["elementList"] = self.element_list
out_dict["bondOrderList"] = self.bond_order_list
out_dict["bondAtomList"] = self.bond_atom_list
out_dict["formalChargeList"] = self.charge_list
out_dict["singleLetterCode"] = self.single_letter_code
out_dict["chemCompType"] = self.group_type
return out_dict | [
"def",
"convert_to_dict",
"(",
"self",
")",
":",
"out_dict",
"=",
"{",
"}",
"out_dict",
"[",
"\"groupName\"",
"]",
"=",
"self",
".",
"group_name",
"out_dict",
"[",
"\"atomNameList\"",
"]",
"=",
"self",
".",
"atom_name_list",
"out_dict",
"[",
"\"elementList\"",... | Convert the group object to an appropriate DICT | [
"Convert",
"the",
"group",
"object",
"to",
"an",
"appropriate",
"DICT"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L45-L56 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | TemplateEncoder.set_atom_info | def set_atom_info(self, atom_name, serial_number, alternative_location_id,
x, y, z, occupancy, temperature_factor, element, charge):
"""Create an atom object an set the information.
:param atom_name: the atom name, e.g. CA for this atom
:param serial_number: the serial id of the atom (e.g. 1)
:param alternative_location_id: the alternative location id for the atom, if present
:param x: the x coordiante of the atom
:param y: the y coordinate of the atom
:param z: the z coordinate of the atom
:param occupancy: the occupancy of the atom
:param temperature_factor: the temperature factor of the atom
:param element: the element of the atom, e.g. C for carbon. According to IUPAC. Calcium is Ca
:param charge: the formal atomic charge of the atom
"""
raise NotImplementedError | python | def set_atom_info(self, atom_name, serial_number, alternative_location_id,
x, y, z, occupancy, temperature_factor, element, charge):
"""Create an atom object an set the information.
:param atom_name: the atom name, e.g. CA for this atom
:param serial_number: the serial id of the atom (e.g. 1)
:param alternative_location_id: the alternative location id for the atom, if present
:param x: the x coordiante of the atom
:param y: the y coordinate of the atom
:param z: the z coordinate of the atom
:param occupancy: the occupancy of the atom
:param temperature_factor: the temperature factor of the atom
:param element: the element of the atom, e.g. C for carbon. According to IUPAC. Calcium is Ca
:param charge: the formal atomic charge of the atom
"""
raise NotImplementedError | [
"def",
"set_atom_info",
"(",
"self",
",",
"atom_name",
",",
"serial_number",
",",
"alternative_location_id",
",",
"x",
",",
"y",
",",
"z",
",",
"occupancy",
",",
"temperature_factor",
",",
"element",
",",
"charge",
")",
":",
"raise",
"NotImplementedError"
] | Create an atom object an set the information.
:param atom_name: the atom name, e.g. CA for this atom
:param serial_number: the serial id of the atom (e.g. 1)
:param alternative_location_id: the alternative location id for the atom, if present
:param x: the x coordiante of the atom
:param y: the y coordinate of the atom
:param z: the z coordinate of the atom
:param occupancy: the occupancy of the atom
:param temperature_factor: the temperature factor of the atom
:param element: the element of the atom, e.g. C for carbon. According to IUPAC. Calcium is Ca
:param charge: the formal atomic charge of the atom | [
"Create",
"an",
"atom",
"object",
"an",
"set",
"the",
"information",
".",
":",
"param",
"atom_name",
":",
"the",
"atom",
"name",
"e",
".",
"g",
".",
"CA",
"for",
"this",
"atom",
":",
"param",
"serial_number",
":",
"the",
"serial",
"id",
"of",
"the",
... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L84-L98 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | TemplateEncoder.set_group_info | def set_group_info(self, group_name, group_number, insertion_code,
group_type, atom_count, bond_count, single_letter_code,
sequence_index, secondary_structure_type):
"""Set the information for a group
:param group_name: the name of this group,e.g. LYS
:param group_number: the residue number of this group
:param insertion_code: the insertion code for this group
:param group_type: a string indicating the type of group (as found in the chemcomp dictionary.
Empty string if none available.
:param atom_count: the number of atoms in the group
:param bond_count: the number of unique bonds in the group
:param single_letter_code: the single letter code of the group
:param sequence_index: the index of this group in the sequence defined by the enttiy
:param secondary_structure_type: the type of secondary structure used (types are according to DSSP and
number to type mappings are defined in the specification)
"""
raise NotImplementedError | python | def set_group_info(self, group_name, group_number, insertion_code,
group_type, atom_count, bond_count, single_letter_code,
sequence_index, secondary_structure_type):
"""Set the information for a group
:param group_name: the name of this group,e.g. LYS
:param group_number: the residue number of this group
:param insertion_code: the insertion code for this group
:param group_type: a string indicating the type of group (as found in the chemcomp dictionary.
Empty string if none available.
:param atom_count: the number of atoms in the group
:param bond_count: the number of unique bonds in the group
:param single_letter_code: the single letter code of the group
:param sequence_index: the index of this group in the sequence defined by the enttiy
:param secondary_structure_type: the type of secondary structure used (types are according to DSSP and
number to type mappings are defined in the specification)
"""
raise NotImplementedError | [
"def",
"set_group_info",
"(",
"self",
",",
"group_name",
",",
"group_number",
",",
"insertion_code",
",",
"group_type",
",",
"atom_count",
",",
"bond_count",
",",
"single_letter_code",
",",
"sequence_index",
",",
"secondary_structure_type",
")",
":",
"raise",
"NotIm... | Set the information for a group
:param group_name: the name of this group,e.g. LYS
:param group_number: the residue number of this group
:param insertion_code: the insertion code for this group
:param group_type: a string indicating the type of group (as found in the chemcomp dictionary.
Empty string if none available.
:param atom_count: the number of atoms in the group
:param bond_count: the number of unique bonds in the group
:param single_letter_code: the single letter code of the group
:param sequence_index: the index of this group in the sequence defined by the enttiy
:param secondary_structure_type: the type of secondary structure used (types are according to DSSP and
number to type mappings are defined in the specification) | [
"Set",
"the",
"information",
"for",
"a",
"group",
":",
"param",
"group_name",
":",
"the",
"name",
"of",
"this",
"group",
"e",
".",
"g",
".",
"LYS",
":",
"param",
"group_number",
":",
"the",
"residue",
"number",
"of",
"this",
"group",
":",
"param",
"ins... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L121-L137 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | TemplateEncoder.set_header_info | def set_header_info(self, r_free, r_work, resolution, title,
deposition_date, release_date, experimental_methods):
"""Sets the header information.
:param r_free: the measured R-Free for the structure
:param r_work: the measure R-Work for the structure
:param resolution: the resolution of the structure
:param title: the title of the structure
:param deposition_date: the deposition date of the structure
:param release_date: the release date of the structure
:param experimnetal_methods: the list of experimental methods in the structure
"""
raise NotImplementedError | python | def set_header_info(self, r_free, r_work, resolution, title,
deposition_date, release_date, experimental_methods):
"""Sets the header information.
:param r_free: the measured R-Free for the structure
:param r_work: the measure R-Work for the structure
:param resolution: the resolution of the structure
:param title: the title of the structure
:param deposition_date: the deposition date of the structure
:param release_date: the release date of the structure
:param experimnetal_methods: the list of experimental methods in the structure
"""
raise NotImplementedError | [
"def",
"set_header_info",
"(",
"self",
",",
"r_free",
",",
"r_work",
",",
"resolution",
",",
"title",
",",
"deposition_date",
",",
"release_date",
",",
"experimental_methods",
")",
":",
"raise",
"NotImplementedError"
] | Sets the header information.
:param r_free: the measured R-Free for the structure
:param r_work: the measure R-Work for the structure
:param resolution: the resolution of the structure
:param title: the title of the structure
:param deposition_date: the deposition date of the structure
:param release_date: the release date of the structure
:param experimnetal_methods: the list of experimental methods in the structure | [
"Sets",
"the",
"header",
"information",
".",
":",
"param",
"r_free",
":",
"the",
"measured",
"R",
"-",
"Free",
"for",
"the",
"structure",
":",
"param",
"r_work",
":",
"the",
"measure",
"R",
"-",
"Work",
"for",
"the",
"structure",
":",
"param",
"resolutio... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L158-L169 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.encode_data | def encode_data(self):
"""Encode the data back into a dict."""
output_data = {}
output_data["groupTypeList"] = encode_array(self.group_type_list, 4, 0)
output_data["xCoordList"] = encode_array(self.x_coord_list, 10, 1000)
output_data["yCoordList"] = encode_array(self.y_coord_list, 10, 1000)
output_data["zCoordList"] = encode_array(self.z_coord_list, 10, 1000)
output_data["bFactorList"] = encode_array(self.b_factor_list, 10, 100)
output_data["occupancyList"] = encode_array(self.occupancy_list, 9, 100)
output_data["atomIdList"] = encode_array(self.atom_id_list, 8, 0)
output_data["altLocList"] = encode_array(self.alt_loc_list, 6, 0)
output_data["insCodeList"] = encode_array(self.ins_code_list, 6, 0)
output_data["groupIdList"] = encode_array(self.group_id_list, 8, 0)
output_data["groupList"] = self.group_list
output_data["sequenceIndexList"] = encode_array(self.sequence_index_list, 8, 0)
output_data["chainNameList"] = encode_array(self.chain_name_list, 5, 4)
output_data["chainIdList"] = encode_array(self.chain_id_list, 5, 4)
output_data["bondAtomList"] = encode_array(self.bond_atom_list, 4, 0)
output_data["bondOrderList"] = encode_array(self.bond_order_list, 2, 0)
output_data["secStructList"] = encode_array(self.sec_struct_list, 2, 0)
output_data["chainsPerModel"] = self.chains_per_model
output_data["groupsPerChain"] = self.groups_per_chain
output_data["spaceGroup"] = self.space_group
output_data["mmtfVersion"] = self.mmtf_version
output_data["mmtfProducer"] = self.mmtf_producer
output_data["structureId"] = self.structure_id
output_data["entityList"] = self.entity_list
output_data["bioAssemblyList"] = self.bio_assembly
output_data["rFree"] = self.r_free
output_data["rWork"] = self.r_work
output_data["resolution"] = self.resolution
output_data["title"] = self.title
output_data["experimentalMethods"] = self.experimental_methods
output_data["depositionDate"] = self.deposition_date
output_data["releaseDate"] = self.release_date
output_data["unitCell"] = self.unit_cell
output_data["numBonds"] = self.num_bonds
output_data["numChains"] = self.num_chains
output_data["numModels"] = self.num_models
output_data["numAtoms"] = self.num_atoms
output_data["numGroups"] = self.num_groups
return output_data | python | def encode_data(self):
"""Encode the data back into a dict."""
output_data = {}
output_data["groupTypeList"] = encode_array(self.group_type_list, 4, 0)
output_data["xCoordList"] = encode_array(self.x_coord_list, 10, 1000)
output_data["yCoordList"] = encode_array(self.y_coord_list, 10, 1000)
output_data["zCoordList"] = encode_array(self.z_coord_list, 10, 1000)
output_data["bFactorList"] = encode_array(self.b_factor_list, 10, 100)
output_data["occupancyList"] = encode_array(self.occupancy_list, 9, 100)
output_data["atomIdList"] = encode_array(self.atom_id_list, 8, 0)
output_data["altLocList"] = encode_array(self.alt_loc_list, 6, 0)
output_data["insCodeList"] = encode_array(self.ins_code_list, 6, 0)
output_data["groupIdList"] = encode_array(self.group_id_list, 8, 0)
output_data["groupList"] = self.group_list
output_data["sequenceIndexList"] = encode_array(self.sequence_index_list, 8, 0)
output_data["chainNameList"] = encode_array(self.chain_name_list, 5, 4)
output_data["chainIdList"] = encode_array(self.chain_id_list, 5, 4)
output_data["bondAtomList"] = encode_array(self.bond_atom_list, 4, 0)
output_data["bondOrderList"] = encode_array(self.bond_order_list, 2, 0)
output_data["secStructList"] = encode_array(self.sec_struct_list, 2, 0)
output_data["chainsPerModel"] = self.chains_per_model
output_data["groupsPerChain"] = self.groups_per_chain
output_data["spaceGroup"] = self.space_group
output_data["mmtfVersion"] = self.mmtf_version
output_data["mmtfProducer"] = self.mmtf_producer
output_data["structureId"] = self.structure_id
output_data["entityList"] = self.entity_list
output_data["bioAssemblyList"] = self.bio_assembly
output_data["rFree"] = self.r_free
output_data["rWork"] = self.r_work
output_data["resolution"] = self.resolution
output_data["title"] = self.title
output_data["experimentalMethods"] = self.experimental_methods
output_data["depositionDate"] = self.deposition_date
output_data["releaseDate"] = self.release_date
output_data["unitCell"] = self.unit_cell
output_data["numBonds"] = self.num_bonds
output_data["numChains"] = self.num_chains
output_data["numModels"] = self.num_models
output_data["numAtoms"] = self.num_atoms
output_data["numGroups"] = self.num_groups
return output_data | [
"def",
"encode_data",
"(",
"self",
")",
":",
"output_data",
"=",
"{",
"}",
"output_data",
"[",
"\"groupTypeList\"",
"]",
"=",
"encode_array",
"(",
"self",
".",
"group_type_list",
",",
"4",
",",
"0",
")",
"output_data",
"[",
"\"xCoordList\"",
"]",
"=",
"enc... | Encode the data back into a dict. | [
"Encode",
"the",
"data",
"back",
"into",
"a",
"dict",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L209-L250 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.init_structure | def init_structure(self, total_num_bonds, total_num_atoms,
total_num_groups, total_num_chains, total_num_models,
structure_id):
"""Initialise the structure object.
:param total_num_bonds: the number of bonds in the structure
:param total_num_atoms: the number of atoms in the structure
:param total_num_groups: the number of groups in the structure
:param total_num_chains: the number of chains in the structure
:param total_num_models: the number of models in the structure
:param structure_id the: id of the structure (e.g. PDB id)
"""
self.mmtf_version = constants.MMTF_VERSION
self.mmtf_producer = constants.PRODUCER
self.num_atoms = total_num_atoms
self.num_bonds = total_num_bonds
self.num_groups = total_num_groups
self.num_chains = total_num_chains
self.num_models = total_num_models
self.structure_id = structure_id
# initialise the arrays
self.x_coord_list = []
self.y_coord_list = []
self.z_coord_list = []
self.group_type_list = []
self.entity_list = []
self.b_factor_list = []
self.occupancy_list = []
self.atom_id_list = []
self.alt_loc_list = []
self.ins_code_list = []
self.group_id_list = []
self.sequence_index_list = []
self.group_list = []
self.chain_name_list = []
self.chain_id_list = []
self.bond_atom_list = []
self.bond_order_list = []
self.sec_struct_list = []
self.chains_per_model = []
self.groups_per_chain = []
self.current_group = None
self.bio_assembly = [] | python | def init_structure(self, total_num_bonds, total_num_atoms,
total_num_groups, total_num_chains, total_num_models,
structure_id):
"""Initialise the structure object.
:param total_num_bonds: the number of bonds in the structure
:param total_num_atoms: the number of atoms in the structure
:param total_num_groups: the number of groups in the structure
:param total_num_chains: the number of chains in the structure
:param total_num_models: the number of models in the structure
:param structure_id the: id of the structure (e.g. PDB id)
"""
self.mmtf_version = constants.MMTF_VERSION
self.mmtf_producer = constants.PRODUCER
self.num_atoms = total_num_atoms
self.num_bonds = total_num_bonds
self.num_groups = total_num_groups
self.num_chains = total_num_chains
self.num_models = total_num_models
self.structure_id = structure_id
# initialise the arrays
self.x_coord_list = []
self.y_coord_list = []
self.z_coord_list = []
self.group_type_list = []
self.entity_list = []
self.b_factor_list = []
self.occupancy_list = []
self.atom_id_list = []
self.alt_loc_list = []
self.ins_code_list = []
self.group_id_list = []
self.sequence_index_list = []
self.group_list = []
self.chain_name_list = []
self.chain_id_list = []
self.bond_atom_list = []
self.bond_order_list = []
self.sec_struct_list = []
self.chains_per_model = []
self.groups_per_chain = []
self.current_group = None
self.bio_assembly = [] | [
"def",
"init_structure",
"(",
"self",
",",
"total_num_bonds",
",",
"total_num_atoms",
",",
"total_num_groups",
",",
"total_num_chains",
",",
"total_num_models",
",",
"structure_id",
")",
":",
"self",
".",
"mmtf_version",
"=",
"constants",
".",
"MMTF_VERSION",
"self"... | Initialise the structure object.
:param total_num_bonds: the number of bonds in the structure
:param total_num_atoms: the number of atoms in the structure
:param total_num_groups: the number of groups in the structure
:param total_num_chains: the number of chains in the structure
:param total_num_models: the number of models in the structure
:param structure_id the: id of the structure (e.g. PDB id) | [
"Initialise",
"the",
"structure",
"object",
".",
":",
"param",
"total_num_bonds",
":",
"the",
"number",
"of",
"bonds",
"in",
"the",
"structure",
":",
"param",
"total_num_atoms",
":",
"the",
"number",
"of",
"atoms",
"in",
"the",
"structure",
":",
"param",
"to... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L263-L304 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.set_atom_info | def set_atom_info(self, atom_name, serial_number, alternative_location_id,
x, y, z, occupancy, temperature_factor, element, charge):
"""Create an atom object an set the information.
:param atom_name: the atom name, e.g. CA for this atom
:param serial_number: the serial id of the atom (e.g. 1)
:param alternative_location_id: the alternative location id for the atom, if present
:param x: the x coordiante of the atom
:param y: the y coordinate of the atom
:param z: the z coordinate of the atom
:param occupancy: the occupancy of the atom
:param temperature_factor: the temperature factor of the atom
:param element: the element of the atom, e.g. C for carbon. According to IUPAC. Calcium is Ca
:param charge: the formal atomic charge of the atom
"""
self.x_coord_list.append(x)
self.y_coord_list.append(y)
self.z_coord_list.append(z)
self.atom_id_list.append(serial_number)
self.alt_loc_list.append(alternative_location_id)
self.occupancy_list.append(occupancy)
self.b_factor_list.append(temperature_factor)
## Now add the group level data
self.current_group.atom_name_list.append(atom_name)
self.current_group.charge_list.append(charge)
self.current_group.element_list.append(element) | python | def set_atom_info(self, atom_name, serial_number, alternative_location_id,
x, y, z, occupancy, temperature_factor, element, charge):
"""Create an atom object an set the information.
:param atom_name: the atom name, e.g. CA for this atom
:param serial_number: the serial id of the atom (e.g. 1)
:param alternative_location_id: the alternative location id for the atom, if present
:param x: the x coordiante of the atom
:param y: the y coordinate of the atom
:param z: the z coordinate of the atom
:param occupancy: the occupancy of the atom
:param temperature_factor: the temperature factor of the atom
:param element: the element of the atom, e.g. C for carbon. According to IUPAC. Calcium is Ca
:param charge: the formal atomic charge of the atom
"""
self.x_coord_list.append(x)
self.y_coord_list.append(y)
self.z_coord_list.append(z)
self.atom_id_list.append(serial_number)
self.alt_loc_list.append(alternative_location_id)
self.occupancy_list.append(occupancy)
self.b_factor_list.append(temperature_factor)
## Now add the group level data
self.current_group.atom_name_list.append(atom_name)
self.current_group.charge_list.append(charge)
self.current_group.element_list.append(element) | [
"def",
"set_atom_info",
"(",
"self",
",",
"atom_name",
",",
"serial_number",
",",
"alternative_location_id",
",",
"x",
",",
"y",
",",
"z",
",",
"occupancy",
",",
"temperature_factor",
",",
"element",
",",
"charge",
")",
":",
"self",
".",
"x_coord_list",
".",... | Create an atom object an set the information.
:param atom_name: the atom name, e.g. CA for this atom
:param serial_number: the serial id of the atom (e.g. 1)
:param alternative_location_id: the alternative location id for the atom, if present
:param x: the x coordiante of the atom
:param y: the y coordinate of the atom
:param z: the z coordinate of the atom
:param occupancy: the occupancy of the atom
:param temperature_factor: the temperature factor of the atom
:param element: the element of the atom, e.g. C for carbon. According to IUPAC. Calcium is Ca
:param charge: the formal atomic charge of the atom | [
"Create",
"an",
"atom",
"object",
"an",
"set",
"the",
"information",
".",
":",
"param",
"atom_name",
":",
"the",
"atom",
"name",
"e",
".",
"g",
".",
"CA",
"for",
"this",
"atom",
":",
"param",
"serial_number",
":",
"the",
"serial",
"id",
"of",
"the",
... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L307-L331 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.set_chain_info | def set_chain_info(self, chain_id, chain_name, num_groups):
"""Set the chain information.
:param chain_id: the asym chain id from mmCIF
:param chain_name: the auth chain id from mmCIF
:param num_groups: the number of groups this chain has
"""
self.chain_id_list.append(chain_id)
self.chain_name_list.append(chain_name)
self.groups_per_chain.append(num_groups) | python | def set_chain_info(self, chain_id, chain_name, num_groups):
"""Set the chain information.
:param chain_id: the asym chain id from mmCIF
:param chain_name: the auth chain id from mmCIF
:param num_groups: the number of groups this chain has
"""
self.chain_id_list.append(chain_id)
self.chain_name_list.append(chain_name)
self.groups_per_chain.append(num_groups) | [
"def",
"set_chain_info",
"(",
"self",
",",
"chain_id",
",",
"chain_name",
",",
"num_groups",
")",
":",
"self",
".",
"chain_id_list",
".",
"append",
"(",
"chain_id",
")",
"self",
".",
"chain_name_list",
".",
"append",
"(",
"chain_name",
")",
"self",
".",
"g... | Set the chain information.
:param chain_id: the asym chain id from mmCIF
:param chain_name: the auth chain id from mmCIF
:param num_groups: the number of groups this chain has | [
"Set",
"the",
"chain",
"information",
".",
":",
"param",
"chain_id",
":",
"the",
"asym",
"chain",
"id",
"from",
"mmCIF",
":",
"param",
"chain_name",
":",
"the",
"auth",
"chain",
"id",
"from",
"mmCIF",
":",
"param",
"num_groups",
":",
"the",
"number",
"of... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L334-L342 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.set_entity_info | def set_entity_info(self, chain_indices, sequence, description, entity_type):
"""Set the entity level information for the structure.
:param chain_indices: the indices of the chains for this entity
:param sequence: the one letter code sequence for this entity
:param description: the description for this entity
:param entity_type: the entity type (polymer,non-polymer,water)
"""
self.entity_list.append(make_entity_dict(chain_indices,sequence,description,entity_type)) | python | def set_entity_info(self, chain_indices, sequence, description, entity_type):
"""Set the entity level information for the structure.
:param chain_indices: the indices of the chains for this entity
:param sequence: the one letter code sequence for this entity
:param description: the description for this entity
:param entity_type: the entity type (polymer,non-polymer,water)
"""
self.entity_list.append(make_entity_dict(chain_indices,sequence,description,entity_type)) | [
"def",
"set_entity_info",
"(",
"self",
",",
"chain_indices",
",",
"sequence",
",",
"description",
",",
"entity_type",
")",
":",
"self",
".",
"entity_list",
".",
"append",
"(",
"make_entity_dict",
"(",
"chain_indices",
",",
"sequence",
",",
"description",
",",
... | Set the entity level information for the structure.
:param chain_indices: the indices of the chains for this entity
:param sequence: the one letter code sequence for this entity
:param description: the description for this entity
:param entity_type: the entity type (polymer,non-polymer,water) | [
"Set",
"the",
"entity",
"level",
"information",
"for",
"the",
"structure",
".",
":",
"param",
"chain_indices",
":",
"the",
"indices",
"of",
"the",
"chains",
"for",
"this",
"entity",
":",
"param",
"sequence",
":",
"the",
"one",
"letter",
"code",
"sequence",
... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L345-L352 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.set_group_info | def set_group_info(self, group_name, group_number, insertion_code,
group_type, atom_count, bond_count, single_letter_code,
sequence_index, secondary_structure_type):
"""Set the information for a group
:param group_name: the name of this group,e.g. LYS
:param group_number: the residue number of this group
:param insertion_code: the insertion code for this group
:param group_type: a string indicating the type of group (as found in the chemcomp dictionary.
Empty string if none available.
:param atom_count: the number of atoms in the group
:param bond_count: the number of unique bonds in the group
:param single_letter_code: the single letter code of the group
:param sequence_index: the index of this group in the sequence defined by the enttiy
:param secondary_structure_type: the type of secondary structure used (types are according to DSSP and
number to type mappings are defined in the specification)
"""
# Add the group to the overall list - unless it's the first time round
if self.current_group is not None:
self.group_list.append(self.current_group)
# Add the group level information
self.group_id_list.append(group_number)
self.ins_code_list.append(insertion_code)
self.sequence_index_list.append(sequence_index)
self.sec_struct_list.append(secondary_structure_type)
self.current_group = Group()
self.current_group.group_name = group_name
self.current_group.group_type = group_type
self.current_group.single_letter_code = single_letter_code | python | def set_group_info(self, group_name, group_number, insertion_code,
group_type, atom_count, bond_count, single_letter_code,
sequence_index, secondary_structure_type):
"""Set the information for a group
:param group_name: the name of this group,e.g. LYS
:param group_number: the residue number of this group
:param insertion_code: the insertion code for this group
:param group_type: a string indicating the type of group (as found in the chemcomp dictionary.
Empty string if none available.
:param atom_count: the number of atoms in the group
:param bond_count: the number of unique bonds in the group
:param single_letter_code: the single letter code of the group
:param sequence_index: the index of this group in the sequence defined by the enttiy
:param secondary_structure_type: the type of secondary structure used (types are according to DSSP and
number to type mappings are defined in the specification)
"""
# Add the group to the overall list - unless it's the first time round
if self.current_group is not None:
self.group_list.append(self.current_group)
# Add the group level information
self.group_id_list.append(group_number)
self.ins_code_list.append(insertion_code)
self.sequence_index_list.append(sequence_index)
self.sec_struct_list.append(secondary_structure_type)
self.current_group = Group()
self.current_group.group_name = group_name
self.current_group.group_type = group_type
self.current_group.single_letter_code = single_letter_code | [
"def",
"set_group_info",
"(",
"self",
",",
"group_name",
",",
"group_number",
",",
"insertion_code",
",",
"group_type",
",",
"atom_count",
",",
"bond_count",
",",
"single_letter_code",
",",
"sequence_index",
",",
"secondary_structure_type",
")",
":",
"# Add the group ... | Set the information for a group
:param group_name: the name of this group,e.g. LYS
:param group_number: the residue number of this group
:param insertion_code: the insertion code for this group
:param group_type: a string indicating the type of group (as found in the chemcomp dictionary.
Empty string if none available.
:param atom_count: the number of atoms in the group
:param bond_count: the number of unique bonds in the group
:param single_letter_code: the single letter code of the group
:param sequence_index: the index of this group in the sequence defined by the enttiy
:param secondary_structure_type: the type of secondary structure used (types are according to DSSP and
number to type mappings are defined in the specification) | [
"Set",
"the",
"information",
"for",
"a",
"group",
":",
"param",
"group_name",
":",
"the",
"name",
"of",
"this",
"group",
"e",
".",
"g",
".",
"LYS",
":",
"param",
"group_number",
":",
"the",
"residue",
"number",
"of",
"this",
"group",
":",
"param",
"ins... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L355-L383 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.set_xtal_info | def set_xtal_info(self, space_group, unit_cell):
"""Set the crystallographic information for the structure
:param space_group: the space group name, e.g. "P 21 21 21"
:param unit_cell: an array of length 6 with the unit cell parameters in order: a, b, c, alpha, beta, gamma
"""
self.space_group = space_group
self.unit_cell = unit_cell | python | def set_xtal_info(self, space_group, unit_cell):
"""Set the crystallographic information for the structure
:param space_group: the space group name, e.g. "P 21 21 21"
:param unit_cell: an array of length 6 with the unit cell parameters in order: a, b, c, alpha, beta, gamma
"""
self.space_group = space_group
self.unit_cell = unit_cell | [
"def",
"set_xtal_info",
"(",
"self",
",",
"space_group",
",",
"unit_cell",
")",
":",
"self",
".",
"space_group",
"=",
"space_group",
"self",
".",
"unit_cell",
"=",
"unit_cell"
] | Set the crystallographic information for the structure
:param space_group: the space group name, e.g. "P 21 21 21"
:param unit_cell: an array of length 6 with the unit cell parameters in order: a, b, c, alpha, beta, gamma | [
"Set",
"the",
"crystallographic",
"information",
"for",
"the",
"structure",
":",
"param",
"space_group",
":",
"the",
"space",
"group",
"name",
"e",
".",
"g",
".",
"P",
"21",
"21",
"21",
":",
"param",
"unit_cell",
":",
"an",
"array",
"of",
"length",
"6",
... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L394-L400 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.set_header_info | def set_header_info(self, r_free, r_work, resolution, title,
deposition_date, release_date, experimental_methods):
"""Sets the header information.
:param r_free: the measured R-Free for the structure
:param r_work: the measure R-Work for the structure
:param resolution: the resolution of the structure
:param title: the title of the structure
:param deposition_date: the deposition date of the structure
:param release_date: the release date of the structure
:param experimnetal_methods: the list of experimental methods in the structure
"""
self.r_free = r_free
self.r_work = r_work
self.resolution = resolution
self.title = title
self.deposition_date = deposition_date
self.release_date = release_date
self.experimental_methods = experimental_methods | python | def set_header_info(self, r_free, r_work, resolution, title,
deposition_date, release_date, experimental_methods):
"""Sets the header information.
:param r_free: the measured R-Free for the structure
:param r_work: the measure R-Work for the structure
:param resolution: the resolution of the structure
:param title: the title of the structure
:param deposition_date: the deposition date of the structure
:param release_date: the release date of the structure
:param experimnetal_methods: the list of experimental methods in the structure
"""
self.r_free = r_free
self.r_work = r_work
self.resolution = resolution
self.title = title
self.deposition_date = deposition_date
self.release_date = release_date
self.experimental_methods = experimental_methods | [
"def",
"set_header_info",
"(",
"self",
",",
"r_free",
",",
"r_work",
",",
"resolution",
",",
"title",
",",
"deposition_date",
",",
"release_date",
",",
"experimental_methods",
")",
":",
"self",
".",
"r_free",
"=",
"r_free",
"self",
".",
"r_work",
"=",
"r_wor... | Sets the header information.
:param r_free: the measured R-Free for the structure
:param r_work: the measure R-Work for the structure
:param resolution: the resolution of the structure
:param title: the title of the structure
:param deposition_date: the deposition date of the structure
:param release_date: the release date of the structure
:param experimnetal_methods: the list of experimental methods in the structure | [
"Sets",
"the",
"header",
"information",
".",
":",
"param",
"r_free",
":",
"the",
"measured",
"R",
"-",
"Free",
"for",
"the",
"structure",
":",
"param",
"r_work",
":",
"the",
"measure",
"R",
"-",
"Work",
"for",
"the",
"structure",
":",
"param",
"resolutio... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L402-L419 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.set_bio_assembly_trans | def set_bio_assembly_trans(self, bio_assembly_index, input_chain_indices, input_transform):
"""Set the Bioassembly transformation information. A single bioassembly can have multiple transforms,
:param bio_assembly_index: the integer index of the bioassembly
:param input_chain_indices: the list of integer indices for the chains of this bioassembly
:param input_transformation: the list of doubles for the transform of this bioassmbly transform"""
this_bioass = None
for bioass in self.bio_assembly:
if bioass['name'] == str(bio_assembly_index):
this_bioass = bioass
break
if not this_bioass:
this_bioass = {"name": str(bio_assembly_index), 'transformList': []}
else:
self.bio_assembly.remove(this_bioass)
this_bioass['transformList'].append({'chainIndexList':input_chain_indices,'matrix': input_transform})
self.bio_assembly.append(this_bioass) | python | def set_bio_assembly_trans(self, bio_assembly_index, input_chain_indices, input_transform):
"""Set the Bioassembly transformation information. A single bioassembly can have multiple transforms,
:param bio_assembly_index: the integer index of the bioassembly
:param input_chain_indices: the list of integer indices for the chains of this bioassembly
:param input_transformation: the list of doubles for the transform of this bioassmbly transform"""
this_bioass = None
for bioass in self.bio_assembly:
if bioass['name'] == str(bio_assembly_index):
this_bioass = bioass
break
if not this_bioass:
this_bioass = {"name": str(bio_assembly_index), 'transformList': []}
else:
self.bio_assembly.remove(this_bioass)
this_bioass['transformList'].append({'chainIndexList':input_chain_indices,'matrix': input_transform})
self.bio_assembly.append(this_bioass) | [
"def",
"set_bio_assembly_trans",
"(",
"self",
",",
"bio_assembly_index",
",",
"input_chain_indices",
",",
"input_transform",
")",
":",
"this_bioass",
"=",
"None",
"for",
"bioass",
"in",
"self",
".",
"bio_assembly",
":",
"if",
"bioass",
"[",
"'name'",
"]",
"==",
... | Set the Bioassembly transformation information. A single bioassembly can have multiple transforms,
:param bio_assembly_index: the integer index of the bioassembly
:param input_chain_indices: the list of integer indices for the chains of this bioassembly
:param input_transformation: the list of doubles for the transform of this bioassmbly transform | [
"Set",
"the",
"Bioassembly",
"transformation",
"information",
".",
"A",
"single",
"bioassembly",
"can",
"have",
"multiple",
"transforms",
":",
"param",
"bio_assembly_index",
":",
"the",
"integer",
"index",
"of",
"the",
"bioassembly",
":",
"param",
"input_chain_indic... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L422-L437 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.finalize_structure | def finalize_structure(self):
"""Any functions needed to cleanup the structure."""
self.group_list.append(self.current_group)
group_set = get_unique_groups(self.group_list)
for item in self.group_list:
self.group_type_list.append(group_set.index(item))
self.group_list = [x.convert_to_dict() for x in group_set] | python | def finalize_structure(self):
"""Any functions needed to cleanup the structure."""
self.group_list.append(self.current_group)
group_set = get_unique_groups(self.group_list)
for item in self.group_list:
self.group_type_list.append(group_set.index(item))
self.group_list = [x.convert_to_dict() for x in group_set] | [
"def",
"finalize_structure",
"(",
"self",
")",
":",
"self",
".",
"group_list",
".",
"append",
"(",
"self",
".",
"current_group",
")",
"group_set",
"=",
"get_unique_groups",
"(",
"self",
".",
"group_list",
")",
"for",
"item",
"in",
"self",
".",
"group_list",
... | Any functions needed to cleanup the structure. | [
"Any",
"functions",
"needed",
"to",
"cleanup",
"the",
"structure",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L440-L446 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.set_group_bond | def set_group_bond(self, atom_index_one, atom_index_two, bond_order):
"""Add bonds within a group.
:param atom_index_one: the integer atom index (in the group) of the first partner in the bond
:param atom_index_two: the integer atom index (in the group) of the second partner in the bond
:param bond_order: the integer bond order
"""
self.current_group.bond_atom_list.append(atom_index_one)
self.current_group.bond_atom_list.append(atom_index_two)
self.current_group.bond_order_list.append(bond_order) | python | def set_group_bond(self, atom_index_one, atom_index_two, bond_order):
"""Add bonds within a group.
:param atom_index_one: the integer atom index (in the group) of the first partner in the bond
:param atom_index_two: the integer atom index (in the group) of the second partner in the bond
:param bond_order: the integer bond order
"""
self.current_group.bond_atom_list.append(atom_index_one)
self.current_group.bond_atom_list.append(atom_index_two)
self.current_group.bond_order_list.append(bond_order) | [
"def",
"set_group_bond",
"(",
"self",
",",
"atom_index_one",
",",
"atom_index_two",
",",
"bond_order",
")",
":",
"self",
".",
"current_group",
".",
"bond_atom_list",
".",
"append",
"(",
"atom_index_one",
")",
"self",
".",
"current_group",
".",
"bond_atom_list",
... | Add bonds within a group.
:param atom_index_one: the integer atom index (in the group) of the first partner in the bond
:param atom_index_two: the integer atom index (in the group) of the second partner in the bond
:param bond_order: the integer bond order | [
"Add",
"bonds",
"within",
"a",
"group",
".",
":",
"param",
"atom_index_one",
":",
"the",
"integer",
"atom",
"index",
"(",
"in",
"the",
"group",
")",
"of",
"the",
"first",
"partner",
"in",
"the",
"bond",
":",
"param",
"atom_index_two",
":",
"the",
"intege... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L449-L457 | train |
rcsb/mmtf-python | mmtf/api/mmtf_writer.py | MMTFEncoder.set_inter_group_bond | def set_inter_group_bond(self, atom_index_one, atom_index_two, bond_order):
"""Add bonds between groups.
:param atom_index_one: the integer atom index (in the structure) of the first partner in the bond
:param atom_index_two: the integer atom index (in the structure) of the second partner in the bond
:param bond_order the bond order
"""
self.bond_atom_list.append(atom_index_one)
self.bond_atom_list.append(atom_index_two)
self.bond_order_list.append(bond_order) | python | def set_inter_group_bond(self, atom_index_one, atom_index_two, bond_order):
"""Add bonds between groups.
:param atom_index_one: the integer atom index (in the structure) of the first partner in the bond
:param atom_index_two: the integer atom index (in the structure) of the second partner in the bond
:param bond_order the bond order
"""
self.bond_atom_list.append(atom_index_one)
self.bond_atom_list.append(atom_index_two)
self.bond_order_list.append(bond_order) | [
"def",
"set_inter_group_bond",
"(",
"self",
",",
"atom_index_one",
",",
"atom_index_two",
",",
"bond_order",
")",
":",
"self",
".",
"bond_atom_list",
".",
"append",
"(",
"atom_index_one",
")",
"self",
".",
"bond_atom_list",
".",
"append",
"(",
"atom_index_two",
... | Add bonds between groups.
:param atom_index_one: the integer atom index (in the structure) of the first partner in the bond
:param atom_index_two: the integer atom index (in the structure) of the second partner in the bond
:param bond_order the bond order | [
"Add",
"bonds",
"between",
"groups",
".",
":",
"param",
"atom_index_one",
":",
"the",
"integer",
"atom",
"index",
"(",
"in",
"the",
"structure",
")",
"of",
"the",
"first",
"partner",
"in",
"the",
"bond",
":",
"param",
"atom_index_two",
":",
"the",
"integer... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_writer.py#L460-L468 | train |
def run_length_encode(in_array):
    """Run-length encode an integer array.

    The output alternates value and repeat count: ``[v1, n1, v2, n2, ...]``.
    (The original docstring said "decode"; this function encodes.)

    :param in_array: the input array of integers
    :return: the encoded integer array
    """
    # len() check (not truthiness) so numpy arrays are also accepted.
    if len(in_array) == 0:
        return []
    out_array = []
    current = in_array[0]
    count = 1
    for value in in_array[1:]:
        if value == current:
            count += 1
        else:
            # Flush the finished run as a (value, count) pair.
            out_array.extend((current, count))
            current = value
            count = 1
    # Flush the final run.
    out_array.extend((current, count))
    return out_array
"""A function to run length decode an int array.
:param in_array: the inptut array of integers
:return the encoded integer array"""
if(len(in_array)==0):
return []
curr_ans = in_array[0]
out_array = [curr_ans]
counter = 1
for in_int in in_array[1:]:
if in_int == curr_ans:
counter+=1
else:
out_array.append(counter)
out_array.append(in_int)
curr_ans = in_int
counter = 1
# Add the final counter
out_array.append(counter)
return out_array | [
"def",
"run_length_encode",
"(",
"in_array",
")",
":",
"if",
"(",
"len",
"(",
"in_array",
")",
"==",
"0",
")",
":",
"return",
"[",
"]",
"curr_ans",
"=",
"in_array",
"[",
"0",
"]",
"out_array",
"=",
"[",
"curr_ans",
"]",
"counter",
"=",
"1",
"for",
... | A function to run length decode an int array.
:param in_array: the inptut array of integers
:return the encoded integer array | [
"A",
"function",
"to",
"run",
"length",
"decode",
"an",
"int",
"array",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/codecs/encoders/encoders.py#L1-L21 | train |
def delta_encode(in_array):
    """Delta encode an integer array.

    The first value is kept as-is; every later entry is the difference from
    its predecessor.  (The original docstring said "decode"; this encodes.)

    :param in_array: the input array to be delta encoded
    :return: the encoded integer array
    """
    # len() check (not truthiness) so numpy arrays are also accepted.
    if len(in_array) == 0:
        return []
    previous = in_array[0]
    out_array = [previous]
    for value in in_array[1:]:
        out_array.append(value - previous)
        previous = value
    return out_array
"""A function to delta decode an int array.
:param in_array: the inut array to be delta encoded
:return the encoded integer array"""
if(len(in_array)==0):
return []
curr_ans = in_array[0]
out_array = [curr_ans]
for in_int in in_array[1:]:
out_array.append(in_int-curr_ans)
curr_ans = in_int
return out_array | [
"def",
"delta_encode",
"(",
"in_array",
")",
":",
"if",
"(",
"len",
"(",
"in_array",
")",
"==",
"0",
")",
":",
"return",
"[",
"]",
"curr_ans",
"=",
"in_array",
"[",
"0",
"]",
"out_array",
"=",
"[",
"curr_ans",
"]",
"for",
"in_int",
"in",
"in_array",
... | A function to delta decode an int array.
:param in_array: the inut array to be delta encoded
:return the encoded integer array | [
"A",
"function",
"to",
"delta",
"decode",
"an",
"int",
"array",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/codecs/encoders/encoders.py#L23-L35 | train |
def decode_array(input_array):
    """Decode a binary MMTF field.

    Parses the header of the input byte array, then hands the remaining
    payload to the codec the header names, with the header's parameter.

    :param input_array: the byte array to be decoded
    :return: the decoded array
    """
    codec_id, _length, parameter, payload = parse_header(input_array)
    decoder = codec_dict[codec_id]
    return decoder.decode(payload, parameter)
"""Parse the header of an input byte array and then decode using the input array,
the codec and the appropirate parameter.
:param input_array: the array to be decoded
:return the decoded array"""
codec, length, param, input_array = parse_header(input_array)
return codec_dict[codec].decode(input_array, param) | [
"def",
"decode_array",
"(",
"input_array",
")",
":",
"codec",
",",
"length",
",",
"param",
",",
"input_array",
"=",
"parse_header",
"(",
"input_array",
")",
"return",
"codec_dict",
"[",
"codec",
"]",
".",
"decode",
"(",
"input_array",
",",
"param",
")"
] | Parse the header of an input byte array and then decode using the input array,
the codec and the appropirate parameter.
:param input_array: the array to be decoded
:return the decoded array | [
"Parse",
"the",
"header",
"of",
"an",
"input",
"byte",
"array",
"and",
"then",
"decode",
"using",
"the",
"input",
"array",
"the",
"codec",
"and",
"the",
"appropirate",
"parameter",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/codecs/default_codec.py#L12-L19 | train |
def encode_array(input_array, codec, param):
    """Encode the array with the chosen codec and prepend the MMTF header.

    :param input_array: the array to be encoded
    :param codec: the integer index of the codec to use
    :param param: the integer parameter passed to the codec
    :return: the encoded array with the header added to the front
    """
    encoded = codec_dict[codec].encode(input_array, param)
    return add_header(encoded, codec, len(input_array), param)
"""Encode the array using the method and then add the header to this array.
:param input_array: the array to be encoded
:param codec: the integer index of the codec to use
:param param: the integer parameter to use in the function
:return an array with the header added to the fornt"""
return add_header(codec_dict[codec].encode(input_array, param), codec, len(input_array), param) | [
"def",
"encode_array",
"(",
"input_array",
",",
"codec",
",",
"param",
")",
":",
"return",
"add_header",
"(",
"codec_dict",
"[",
"codec",
"]",
".",
"encode",
"(",
"input_array",
",",
"param",
")",
",",
"codec",
",",
"len",
"(",
"input_array",
")",
",",
... | Encode the array using the method and then add the header to this array.
:param input_array: the array to be encoded
:param codec: the integer index of the codec to use
:param param: the integer parameter to use in the function
:return an array with the header added to the fornt | [
"Encode",
"the",
"array",
"using",
"the",
"method",
"and",
"then",
"add",
"the",
"header",
"to",
"this",
"array",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/codecs/default_codec.py#L22-L29 | train |
def run_length_decode(in_array):
    """Run-length decode an integer array.

    The input alternates value and repeat count (``[v1, n1, v2, n2, ...]``);
    each value is expanded ``count`` times in the output.

    :param in_array: the input array of integers
    :return: the decoded array
    """
    out_array = []
    pairs = iter(in_array)
    # zip over the same iterator yields (value, count) pairs; a trailing
    # unpaired value is silently dropped, matching the original toggle logic.
    for value, count in zip(pairs, pairs):
        out_array.extend([value] * int(count))
    return out_array
"""A function to run length decode an int array.
:param in_array: the input array of integers
:return the decoded array"""
switch=False
out_array=[]
for item in in_array:
if switch==False:
this_item = item
switch=True
else:
switch=False
out_array.extend([this_item]*int(item))
return out_array | [
"def",
"run_length_decode",
"(",
"in_array",
")",
":",
"switch",
"=",
"False",
"out_array",
"=",
"[",
"]",
"for",
"item",
"in",
"in_array",
":",
"if",
"switch",
"==",
"False",
":",
"this_item",
"=",
"item",
"switch",
"=",
"True",
"else",
":",
"switch",
... | A function to run length decode an int array.
:param in_array: the input array of integers
:return the decoded array | [
"A",
"function",
"to",
"run",
"length",
"decode",
"an",
"int",
"array",
".",
":",
"param",
"in_array",
":",
"the",
"input",
"array",
"of",
"integers",
":",
"return",
"the",
"decoded",
"array"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/codecs/decoders/decoders.py#L1-L14 | train |
def delta_decode(in_array):
    """Delta decode an integer array (running cumulative sum).

    :param in_array: the input array of integers
    :return: the decoded array
    """
    if len(in_array) == 0:
        return []
    running = in_array[0]
    out_array = [running]
    # Iterate the deltas directly instead of indexing with range(len(...)).
    for delta in in_array[1:]:
        running += delta
        out_array.append(running)
    return out_array
"""A function to delta decode an int array.
:param in_array: the input array of integers
:return the decoded array"""
if len(in_array) == 0:
return []
this_ans = in_array[0]
out_array = [this_ans]
for i in range(1, len(in_array)):
this_ans += in_array[i]
out_array.append(this_ans)
return out_array | [
"def",
"delta_decode",
"(",
"in_array",
")",
":",
"if",
"len",
"(",
"in_array",
")",
"==",
"0",
":",
"return",
"[",
"]",
"this_ans",
"=",
"in_array",
"[",
"0",
"]",
"out_array",
"=",
"[",
"this_ans",
"]",
"for",
"i",
"in",
"range",
"(",
"1",
",",
... | A function to delta decode an int array.
:param in_array: the input array of integers
:return the decoded array | [
"A",
"function",
"to",
"delta",
"decode",
"an",
"int",
"array",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/codecs/decoders/decoders.py#L16-L28 | train |
def convert_bytes_to_ints(in_bytes, num):
    """Reinterpret a byte string as an array of big-endian integers.

    :param in_bytes: the input bytes
    :param num: the width of each integer in bytes
    :return: the integer array (a numpy array)
    """
    int_type = numpy.dtype(">i{0}".format(num))
    return numpy.frombuffer(in_bytes, int_type)
"""Convert a byte array into an integer array. The number of bytes forming an integer
is defined by num
:param in_bytes: the input bytes
:param num: the number of bytes per int
:return the integer array"""
dt = numpy.dtype('>i' + str(num))
return numpy.frombuffer(in_bytes, dt) | [
"def",
"convert_bytes_to_ints",
"(",
"in_bytes",
",",
"num",
")",
":",
"dt",
"=",
"numpy",
".",
"dtype",
"(",
"'>i'",
"+",
"str",
"(",
"num",
")",
")",
"return",
"numpy",
".",
"frombuffer",
"(",
"in_bytes",
",",
"dt",
")"
] | Convert a byte array into an integer array. The number of bytes forming an integer
is defined by num
:param in_bytes: the input bytes
:param num: the number of bytes per int
:return the integer array | [
"Convert",
"a",
"byte",
"array",
"into",
"an",
"integer",
"array",
".",
"The",
"number",
"of",
"bytes",
"forming",
"an",
"integer",
"is",
"defined",
"by",
"num"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/converters/numpy_converters.py#L7-L15 | train |
def decode_chain_list(in_bytes):
    """Split a byte string into fixed-width chain-id strings.

    Each id occupies ``mmtf.utils.constants.CHAIN_LEN`` bytes; null padding
    is stripped from the decoded ASCII strings.

    :param in_bytes: the input bytes
    :return: the decoded list of strings
    """
    width = mmtf.utils.constants.CHAIN_LEN
    raw_strings = numpy.frombuffer(in_bytes, numpy.dtype("S" + str(width)))
    out_list = []
    for raw in raw_strings:
        out_list.append(raw.decode("ascii").strip(mmtf.utils.constants.NULL_BYTE))
    return out_list
"""Convert a list of bytes to a list of strings. Each string is of length mmtf.CHAIN_LEN
:param in_bytes: the input bytes
:return the decoded list of strings"""
bstrings = numpy.frombuffer(in_bytes, numpy.dtype('S' + str(mmtf.utils.constants.CHAIN_LEN)))
return [s.decode("ascii").strip(mmtf.utils.constants.NULL_BYTE) for s in bstrings] | [
"def",
"decode_chain_list",
"(",
"in_bytes",
")",
":",
"bstrings",
"=",
"numpy",
".",
"frombuffer",
"(",
"in_bytes",
",",
"numpy",
".",
"dtype",
"(",
"'S'",
"+",
"str",
"(",
"mmtf",
".",
"utils",
".",
"constants",
".",
"CHAIN_LEN",
")",
")",
")",
"retu... | Convert a list of bytes to a list of strings. Each string is of length mmtf.CHAIN_LEN
:param in_bytes: the input bytes
:return the decoded list of strings | [
"Convert",
"a",
"list",
"of",
"bytes",
"to",
"a",
"list",
"of",
"strings",
".",
"Each",
"string",
"is",
"of",
"length",
"mmtf",
".",
"CHAIN_LEN"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/converters/numpy_converters.py#L17-L23 | train |
def recursive_index_decode(int_array, max=32767, min=-32768):
    """Unpack an array of integers using recursive indexing.

    A value equal to the ``max``/``min`` sentinel means the number continues
    into the next entry; any other value completes one output integer.
    (The original duplicated ``decoded_val += item`` in both branches; the
    accumulation is hoisted here, behavior unchanged.)

    :param int_array: the input numpy array of integers
    :param max: the maximum value of the packed integer type (continuation sentinel)
    :param min: the minimum value of the packed integer type (continuation sentinel)
    :return: a numpy int32 array of the decoded integers
    """
    out_arr = []
    decoded_val = 0
    for item in int_array.tolist():
        decoded_val += item
        # Only a non-sentinel value flushes the accumulator into the output.
        if item != max and item != min:
            out_arr.append(decoded_val)
            decoded_val = 0
    return numpy.asarray(out_arr, dtype=numpy.int32)
"""Unpack an array of integers using recursive indexing.
:param int_array: the input array of integers
:param max: the maximum integer size
:param min: the minimum integer size
:return the array of integers after recursive index decoding"""
out_arr = []
decoded_val = 0
for item in int_array.tolist():
if item==max or item==min:
decoded_val += item
else:
decoded_val += item
out_arr.append(decoded_val)
decoded_val = 0
return numpy.asarray(out_arr,dtype=numpy.int32) | [
"def",
"recursive_index_decode",
"(",
"int_array",
",",
"max",
"=",
"32767",
",",
"min",
"=",
"-",
"32768",
")",
":",
"out_arr",
"=",
"[",
"]",
"decoded_val",
"=",
"0",
"for",
"item",
"in",
"int_array",
".",
"tolist",
"(",
")",
":",
"if",
"item",
"==... | Unpack an array of integers using recursive indexing.
:param int_array: the input array of integers
:param max: the maximum integer size
:param min: the minimum integer size
:return the array of integers after recursive index decoding | [
"Unpack",
"an",
"array",
"of",
"integers",
"using",
"recursive",
"indexing",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/converters/numpy_converters.py#L32-L48 | train |
def get_coords(self):
    """Utility function to get the coordinates as a single list of (x, y, z) tuples.

    Assumes the three coordinate lists have equal length (one entry per
    atom), which the decoder guarantees.
    """
    # zip replaces the original index-by-range loop over three parallel lists.
    return list(zip(self.x_coord_list, self.y_coord_list, self.z_coord_list))
"""Utility function to get the coordinates as a single list of tuples."""
out_list = []
for i in range(len(self.x_coord_list)):
out_list.append((self.x_coord_list[i],self.y_coord_list[i],self.z_coord_list[i],))
return out_list | [
"def",
"get_coords",
"(",
"self",
")",
":",
"out_list",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"x_coord_list",
")",
")",
":",
"out_list",
".",
"append",
"(",
"(",
"self",
".",
"x_coord_list",
"[",
"i",
"]",
",",
"... | Utility function to get the coordinates as a single list of tuples. | [
"Utility",
"function",
"to",
"get",
"the",
"coordinates",
"as",
"a",
"single",
"list",
"of",
"tuples",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_reader.py#L14-L19 | train |
def decode_data(self, input_data):
    """Decode a msgpack-unpacked MMTF dict and set its fields as attributes.

    The original implementation had byte-identical Python 2 and Python 3
    branches (a dead ``sys.version_info`` check) and one long copy-pasted
    if/else per field; this version is table-driven with identical behavior.

    :param input_data: the input data as a dict
    """
    # Required encoded arrays.
    self.group_type_list = decode_array(input_data["groupTypeList"])
    self.x_coord_list = decode_array(input_data["xCoordList"])
    self.y_coord_list = decode_array(input_data["yCoordList"])
    self.z_coord_list = decode_array(input_data["zCoordList"])
    self.group_id_list = decode_array(input_data["groupIdList"])
    self.chain_id_list = decode_array(input_data["chainIdList"])
    # Optional encoded arrays defaulting to an empty list when absent.
    for key, attr in (("bFactorList", "b_factor_list"),
                      ("occupancyList", "occupancy_list"),
                      ("atomIdList", "atom_id_list"),
                      ("altLocList", "alt_loc_list"),
                      ("insCodeList", "ins_code_list"),
                      ("sequenceIndexList", "sequence_index_list"),
                      ("chainNameList", "chain_name_list")):
        setattr(self, attr, decode_array(input_data[key]) if key in input_data else [])
    # Optional encoded arrays defaulting to None when absent.
    for key, attr in (("bondAtomList", "bond_atom_list"),
                      ("bondOrderList", "bond_order_list")):
        setattr(self, attr, decode_array(input_data[key]) if key in input_data else None)
    # Required plain fields.
    self.group_list = input_data["groupList"]
    self.chains_per_model = input_data["chainsPerModel"]
    self.groups_per_chain = input_data["groupsPerChain"]
    # Optional plain fields defaulting to None when absent.
    for key, attr in (("spaceGroup", "space_group"),
                      ("mmtfVersion", "mmtf_version"),
                      ("mmtfProducer", "mmtf_producer"),
                      ("structureId", "structure_id"),
                      ("experimentalMethods", "experimental_methods"),
                      ("depositionDate", "deposition_date"),
                      ("releaseDate", "release_date"),
                      ("rFree", "r_free"),
                      ("rWork", "r_work"),
                      ("resolution", "resolution"),
                      ("unitCell", "unit_cell")):
        setattr(self, attr, input_data.get(key))
    # Optional plain fields defaulting to an empty list when absent.
    self.entity_list = input_data.get("entityList", [])
    self.bio_assembly = input_data.get("bioAssemblyList", [])
    # Matching the original: these two attributes are ONLY set when present.
    if "title" in input_data:
        self.title = input_data["title"]
    if "secStructList" in input_data:
        self.sec_struct_list = decode_array(input_data["secStructList"])
    # Counts defining the size of the structure.
    self.num_bonds = int(input_data["numBonds"])
    self.num_chains = int(input_data["numChains"])
    self.num_models = int(input_data["numModels"])
    self.num_atoms = int(input_data["numAtoms"])
    self.num_groups = int(input_data["numGroups"])
"""Function to decode the input data and place it onto the class.
:param input_data: the input data as a dict"""
self.group_type_list = decode_array(input_data["groupTypeList"])
self.x_coord_list = decode_array(input_data["xCoordList"])
self.y_coord_list = decode_array(input_data["yCoordList"])
self.z_coord_list = decode_array(input_data["zCoordList"])
if "bFactorList" in input_data:
self.b_factor_list = decode_array(input_data["bFactorList"])
else:
self.b_factor_list = []
if "occupancyList" in input_data:
self.occupancy_list = decode_array(input_data["occupancyList"])
else:
self.occupancy_list = []
if "atomIdList" in input_data:
self.atom_id_list = decode_array(input_data["atomIdList"])
else:
self.atom_id_list = []
if "altLocList" in input_data:
self.alt_loc_list = decode_array(input_data["altLocList"])
else:
self.alt_loc_list = []
if "insCodeList" in input_data:
self.ins_code_list = decode_array(input_data["insCodeList"])
else:
self.ins_code_list = []
self.group_id_list = decode_array(input_data["groupIdList"])
self.group_list = input_data["groupList"]
if "sequenceIndexList" in input_data:
self.sequence_index_list = decode_array(input_data["sequenceIndexList"])
else:
self.sequence_index_list = []
self.chains_per_model = input_data["chainsPerModel"]
self.groups_per_chain = input_data["groupsPerChain"]
if "chainNameList" in input_data:
self.chain_name_list = decode_array(input_data["chainNameList"])
else:
self.chain_name_list = []
self.chain_id_list = decode_array(input_data["chainIdList"])
if "spaceGroup" in input_data:
self.space_group = input_data["spaceGroup"]
else:
self.space_group = None
if "bondAtomList" in input_data:
self.bond_atom_list = decode_array(input_data["bondAtomList"])
else:
self.bond_atom_list = None
if "bondOrderList" in input_data:
self.bond_order_list = decode_array(input_data["bondOrderList"])
else:
self.bond_order_list = None
if sys.version_info[0] < 3:
if "mmtfVersion" in input_data:
self.mmtf_version = input_data["mmtfVersion"]
else:
self.mmtf_version = None
if "mmtfProducer" in input_data:
self.mmtf_producer = input_data["mmtfProducer"]
else:
self.mmtf_producer = None
if "structureId" in input_data:
self.structure_id = input_data["structureId"]
else:
self.structure_id = None
else:
if "mmtfVersion" in input_data:
self.mmtf_version = input_data["mmtfVersion"]
else:
self.mmtf_version = None
if "mmtfProducer" in input_data:
self.mmtf_producer = input_data["mmtfProducer"]
else:
self.mmtf_producer = None
if "structureId" in input_data:
self.structure_id = input_data["structureId"]
else:
self.structure_id = None
if "title" in input_data:
if sys.version_info[0] < 3:
self.title = input_data["title"]
else:
self.title = input_data["title"]
if "experimentalMethods" in input_data:
self.experimental_methods = input_data["experimentalMethods"]
else:
self.experimental_methods = None
if "depositionDate" in input_data:
self.deposition_date = input_data["depositionDate"]
else:
self.deposition_date = None
if "releaseDate" in input_data:
self.release_date = input_data["releaseDate"]
else:
self.release_date = None
if "entityList" in input_data:
self.entity_list = input_data["entityList"]
else:
self.entity_list = []
if "bioAssemblyList" in input_data:
self.bio_assembly = input_data["bioAssemblyList"]
else:
self.bio_assembly = []
if "rFree" in input_data:
self.r_free = input_data["rFree"]
else:
self.r_free = None
if "rWork" in input_data:
self.r_work = input_data["rWork"]
else:
self.r_work = None
if "resolution" in input_data:
self.resolution = input_data["resolution"]
else:
self.resolution = None
if "unitCell" in input_data:
self.unit_cell = input_data["unitCell"]
else:
self.unit_cell = None
if "secStructList" in input_data:
self.sec_struct_list = decode_array(input_data["secStructList"])
# Now all the numbers to defien the
self.num_bonds = int(input_data["numBonds"])
self.num_chains = int(input_data["numChains"])
self.num_models = int(input_data["numModels"])
self.num_atoms = int(input_data["numAtoms"])
self.num_groups = int(input_data["numGroups"]) | [
"def",
"decode_data",
"(",
"self",
",",
"input_data",
")",
":",
"self",
".",
"group_type_list",
"=",
"decode_array",
"(",
"input_data",
"[",
"\"groupTypeList\"",
"]",
")",
"self",
".",
"x_coord_list",
"=",
"decode_array",
"(",
"input_data",
"[",
"\"xCoordList\""... | Function to decode the input data and place it onto the class.
:param input_data: the input data as a dict | [
"Function",
"to",
"decode",
"the",
"input",
"data",
"and",
"place",
"it",
"onto",
"the",
"class",
".",
":",
"param",
"input_data",
":",
"the",
"input",
"data",
"as",
"a",
"dict"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_reader.py#L25-L151 | train |
def pass_data_on(self, data_setters):
    """Write the data from the getters to the setters.

    :param data_setters: a series of functions that can fill a chemical
        data structure
    :type data_setters: DataTransferInterface
    """
    # Derive the structure counts once, then drive the setter pipeline in
    # the required order (init -> entities -> atoms -> metadata -> finalize).
    total_atoms = len(self.x_coord_list)
    total_groups = len(self.group_type_list)
    total_chains = len(self.chain_id_list)
    total_models = len(self.chains_per_model)
    data_setters.init_structure(self.num_bonds, total_atoms, total_groups,
                                total_chains, total_models, self.structure_id)
    decoder_utils.add_entity_info(self, data_setters)
    decoder_utils.add_atomic_information(self, data_setters)
    decoder_utils.add_header_info(self, data_setters)
    decoder_utils.add_xtalographic_info(self, data_setters)
    decoder_utils.generate_bio_assembly(self, data_setters)
    decoder_utils.add_inter_group_bonds(self, data_setters)
    data_setters.finalize_structure()
"""Write the data from the getters to the setters.
:param data_setters: a series of functions that can fill a chemical
data structure
:type data_setters: DataTransferInterface
"""
data_setters.init_structure(self.num_bonds, len(self.x_coord_list), len(self.group_type_list),
len(self.chain_id_list), len(self.chains_per_model), self.structure_id)
decoder_utils.add_entity_info(self, data_setters)
decoder_utils.add_atomic_information(self, data_setters)
decoder_utils.add_header_info(self, data_setters)
decoder_utils.add_xtalographic_info(self, data_setters)
decoder_utils.generate_bio_assembly(self, data_setters)
decoder_utils.add_inter_group_bonds(self, data_setters)
data_setters.finalize_structure() | [
"def",
"pass_data_on",
"(",
"self",
",",
"data_setters",
")",
":",
"data_setters",
".",
"init_structure",
"(",
"self",
".",
"num_bonds",
",",
"len",
"(",
"self",
".",
"x_coord_list",
")",
",",
"len",
"(",
"self",
".",
"group_type_list",
")",
",",
"len",
... | Write the data from the getters to the setters.
:param data_setters: a series of functions that can fill a chemical
data structure
:type data_setters: DataTransferInterface | [
"Write",
"the",
"data",
"from",
"the",
"getters",
"to",
"the",
"setters",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/mmtf_reader.py#L154-L169 | train |
def _internet_on(address):
    """Check whether the internet is reachable by fetching a known address.

    :param address: the IP or URL to hit
    :return: True if the address responds within one second, False otherwise
    """
    try:
        response = urllib2.urlopen(address, timeout=1)
    except urllib2.URLError:
        # Unreachable or timed out -- treat as offline.  (The original bound
        # the exception to an unused variable.)
        return False
    # Close the connection we only opened as a reachability probe
    # (the original leaked the response object).
    response.close()
    return True
"""
Check to see if the internet is on by pinging a set address.
:param address: the IP or address to hit
:return: a boolean - true if can be reached, false if not.
"""
try:
urllib2.urlopen(address, timeout=1)
return True
except urllib2.URLError as err:
return False | [
"def",
"_internet_on",
"(",
"address",
")",
":",
"try",
":",
"urllib2",
".",
"urlopen",
"(",
"address",
",",
"timeout",
"=",
"1",
")",
"return",
"True",
"except",
"urllib2",
".",
"URLError",
"as",
"err",
":",
"return",
"False"
] | Check to see if the internet is on by pinging a set address.
:param address: the IP or address to hit
:return: a boolean - true if can be reached, false if not. | [
"Check",
"to",
"see",
"if",
"the",
"internet",
"is",
"on",
"by",
"pinging",
"a",
"set",
"address",
".",
":",
"param",
"address",
":",
"the",
"IP",
"or",
"address",
"to",
"hit",
":",
"return",
":",
"a",
"boolean",
"-",
"true",
"if",
"can",
"be",
"re... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/default_api.py#L15-L25 | train |
def write_mmtf(file_path, input_data, input_function):
    """API function to write data as MMTF to a file.

    :param file_path: the path of the file to write
    :param input_data: the input data in any user format
    :param input_function: a function to convert ``input_data`` to an output
        format; must drive all methods in TemplateEncoder
    """
    mmtf_encoder = MMTFEncoder()
    # Let the user-supplied converter populate the encoder, then flush it.
    pass_data_on(input_data, input_function, mmtf_encoder)
    mmtf_encoder.write_file(file_path)
"""API function to write data as MMTF to a file
:param file_path the path of the file to write
:param input_data the input data in any user format
:param input_function a function to converte input_data to an output format. Must contain all methods in TemplateEncoder
"""
mmtf_encoder = MMTFEncoder()
pass_data_on(input_data, input_function, mmtf_encoder)
mmtf_encoder.write_file(file_path) | [
"def",
"write_mmtf",
"(",
"file_path",
",",
"input_data",
",",
"input_function",
")",
":",
"mmtf_encoder",
"=",
"MMTFEncoder",
"(",
")",
"pass_data_on",
"(",
"input_data",
",",
"input_function",
",",
"mmtf_encoder",
")",
"mmtf_encoder",
".",
"write_file",
"(",
"... | API function to write data as MMTF to a file
:param file_path the path of the file to write
:param input_data the input data in any user format
:param input_function a function to converte input_data to an output format. Must contain all methods in TemplateEncoder | [
"API",
"function",
"to",
"write",
"data",
"as",
"MMTF",
"to",
"a",
"file"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/default_api.py#L27-L36 | train |
def get_raw_data_from_url(pdb_id, reduced=False):
    """Get the msgpack-unpacked data for a PDB id from the MMTF web service.

    :param pdb_id: the input PDB id
    :param reduced: whether to fetch the reduced representation of the entry
    :return: the unpacked data (a dict)
    """
    url = get_url(pdb_id, reduced)
    request = urllib2.Request(url)
    request.add_header('Accept-encoding', 'gzip')
    response = urllib2.urlopen(request)
    try:
        # The server may or may not honour the gzip request; check the header.
        if response.info().get('Content-Encoding') == 'gzip':
            data = ungzip_data(response.read())
        else:
            data = response.read()
    finally:
        # Always release the HTTP connection (the original leaked it).
        response.close()
    return _unpack(data)
"""" Get the msgpack unpacked data given a PDB id.
:param pdb_id: the input PDB id
:return the unpacked data (a dict) """
url = get_url(pdb_id,reduced)
request = urllib2.Request(url)
request.add_header('Accept-encoding', 'gzip')
response = urllib2.urlopen(request)
if response.info().get('Content-Encoding') == 'gzip':
data = ungzip_data(response.read())
else:
data = response.read()
return _unpack(data) | [
"def",
"get_raw_data_from_url",
"(",
"pdb_id",
",",
"reduced",
"=",
"False",
")",
":",
"url",
"=",
"get_url",
"(",
"pdb_id",
",",
"reduced",
")",
"request",
"=",
"urllib2",
".",
"Request",
"(",
"url",
")",
"request",
".",
"add_header",
"(",
"'Accept-encodi... | Get the msgpack unpacked data given a PDB id.
:param pdb_id: the input PDB id
:return the unpacked data (a dict) | [
"Get",
"the",
"msgpack",
"unpacked",
"data",
"given",
"a",
"PDB",
"id",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/default_api.py#L48-L61 | train |
def parse(file_path):
    """Return a decoded API to the data from a file path.

    :param file_path: the input file path; the data must not be entropy
        compressed (e.g. gzip)
    :return: an API to the decoded data
    """
    decoder = MMTFDecoder()
    with open(file_path, "rb") as file_handle:
        decoder.decode_data(_unpack(file_handle))
    return decoder
"""Return a decoded API to the data from a file path.
:param file_path: the input file path. Data is not entropy compressed (e.g. gzip)
:return an API to decoded data """
newDecoder = MMTFDecoder()
with open(file_path, "rb") as fh:
newDecoder.decode_data(_unpack(fh))
return newDecoder | [
"def",
"parse",
"(",
"file_path",
")",
":",
"newDecoder",
"=",
"MMTFDecoder",
"(",
")",
"with",
"open",
"(",
"file_path",
",",
"\"rb\"",
")",
"as",
"fh",
":",
"newDecoder",
".",
"decode_data",
"(",
"_unpack",
"(",
"fh",
")",
")",
"return",
"newDecoder"
] | Return a decoded API to the data from a file path.
:param file_path: the input file path. Data is not entropy compressed (e.g. gzip)
:return an API to decoded data | [
"Return",
"a",
"decoded",
"API",
"to",
"the",
"data",
"from",
"a",
"file",
"path",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/default_api.py#L87-L95 | train |
def parse_gzip(file_path):
    """Decode a gzip-compressed MMTF file into an API object.

    :param file_path: the input file path; the data is gzip compressed
    :return: an API to the decoded data
    """
    decoder = MMTFDecoder()
    # Use a context manager so the gzip file handle is closed deterministically;
    # the original left it open until garbage collection (a resource leak).
    with gzip.open(file_path, "rb") as handle:
        decoder.decode_data(_unpack(handle))
    return decoder
"""Return a decoded API to the data from a file path. File is gzip compressed.
:param file_path: the input file path. Data is gzip compressed.
:return an API to decoded data"""
newDecoder = MMTFDecoder()
newDecoder.decode_data(_unpack(gzip.open(file_path, "rb")))
return newDecoder | [
"def",
"parse_gzip",
"(",
"file_path",
")",
":",
"newDecoder",
"=",
"MMTFDecoder",
"(",
")",
"newDecoder",
".",
"decode_data",
"(",
"_unpack",
"(",
"gzip",
".",
"open",
"(",
"file_path",
",",
"\"rb\"",
")",
")",
")",
"return",
"newDecoder"
] | Return a decoded API to the data from a file path. File is gzip compressed.
:param file_path: the input file path. Data is gzip compressed.
:return an API to decoded data | [
"Return",
"a",
"decoded",
"API",
"to",
"the",
"data",
"from",
"a",
"file",
"path",
".",
"File",
"is",
"gzip",
"compressed",
".",
":",
"param",
"file_path",
":",
"the",
"input",
"file",
"path",
".",
"Data",
"is",
"gzip",
"compressed",
".",
":",
"return"... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/default_api.py#L98-L104 | train |
def ungzip_data(input_data):
    """Wrap gzip-compressed bytes in a file-like object that inflates on read.

    Note: this does not return a string (despite the historical docstring) --
    it returns a file-like object whose ``read()`` yields the decoded bytes.

    :param input_data: the gzip-compressed input data (bytes)
    :return: a file-like object yielding the gzip-decoded data
    """
    # BytesIO is the correct buffer for binary gzip data and works on both
    # Python 2 and 3; the original StringIO fails on bytes under Python 3.
    from io import BytesIO
    return gzip.GzipFile(fileobj=BytesIO(input_data))
"""Return a string of data after gzip decoding
:param the input gziped data
:return the gzip decoded data"""
buf = StringIO(input_data)
f = gzip.GzipFile(fileobj=buf)
return f | [
"def",
"ungzip_data",
"(",
"input_data",
")",
":",
"buf",
"=",
"StringIO",
"(",
"input_data",
")",
"f",
"=",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"buf",
")",
"return",
"f"
] | Return a string of data after gzip decoding
:param the input gziped data
:return the gzip decoded data | [
"Return",
"a",
"string",
"of",
"data",
"after",
"gzip",
"decoding"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/api/default_api.py#L107-L114 | train |
def parse_header(input_array):
    """Split the 12-byte codec header off an encoded byte array.

    :param input_array: the array to parse
    :return: tuple of (codec, length of the decoded array, codec parameter,
        remainder of the array)
    """
    fmt = mmtf.utils.constants.NUM_DICT[4]
    # The header is three consecutive 4-byte integers.
    codec, length, param = (
        struct.unpack(fmt, input_array[offset:offset + 4])[0]
        for offset in (0, 4, 8)
    )
    return codec, length, param, input_array[12:]
"""Parse the header and return it along with the input array minus the header.
:param input_array the array to parse
:return the codec, the length of the decoded array, the parameter and the remainder
of the array"""
codec = struct.unpack(mmtf.utils.constants.NUM_DICT[4], input_array[0:4])[0]
length = struct.unpack(mmtf.utils.constants.NUM_DICT[4], input_array[4:8])[0]
param = struct.unpack(mmtf.utils.constants.NUM_DICT[4], input_array[8:12])[0]
return codec,length,param,input_array[12:] | [
"def",
"parse_header",
"(",
"input_array",
")",
":",
"codec",
"=",
"struct",
".",
"unpack",
"(",
"mmtf",
".",
"utils",
".",
"constants",
".",
"NUM_DICT",
"[",
"4",
"]",
",",
"input_array",
"[",
"0",
":",
"4",
"]",
")",
"[",
"0",
"]",
"length",
"=",... | Parse the header and return it along with the input array minus the header.
:param input_array the array to parse
:return the codec, the length of the decoded array, the parameter and the remainder
of the array | [
"Parse",
"the",
"header",
"and",
"return",
"it",
"along",
"with",
"the",
"input",
"array",
"minus",
"the",
"header",
".",
":",
"param",
"input_array",
"the",
"array",
"to",
"parse",
":",
"return",
"the",
"codec",
"the",
"length",
"of",
"the",
"decoded",
... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/codec_utils.py#L7-L15 | train |
def add_header(input_array, codec, length, param):
    """Prepend the 12-byte codec header to an encoded byte array.

    :param input_array: the encoded array to prepend the header to
    :param codec: the codec being used
    :param length: the length of the decoded array
    :param param: the codec parameter to store in the header
    :return: the encoded byte array with the header prepended
    """
    fmt = mmtf.utils.constants.NUM_DICT[4]
    header = b"".join(struct.pack(fmt, field) for field in (codec, length, param))
    return header + input_array
"""Add the header to the appropriate array.
:param the encoded array to add the header to
:param the codec being used
:param the length of the decoded array
:param the parameter to add to the header
:return the prepended encoded byte array"""
return struct.pack(mmtf.utils.constants.NUM_DICT[4], codec) + \
struct.pack(mmtf.utils.constants.NUM_DICT[4], length) + \
struct.pack(mmtf.utils.constants.NUM_DICT[4], param) + input_array | [
"def",
"add_header",
"(",
"input_array",
",",
"codec",
",",
"length",
",",
"param",
")",
":",
"return",
"struct",
".",
"pack",
"(",
"mmtf",
".",
"utils",
".",
"constants",
".",
"NUM_DICT",
"[",
"4",
"]",
",",
"codec",
")",
"+",
"struct",
".",
"pack",... | Add the header to the appropriate array.
:param the encoded array to add the header to
:param the codec being used
:param the length of the decoded array
:param the parameter to add to the header
:return the prepended encoded byte array | [
"Add",
"the",
"header",
"to",
"the",
"appropriate",
"array",
".",
":",
"param",
"the",
"encoded",
"array",
"to",
"add",
"the",
"header",
"to",
":",
"param",
"the",
"codec",
"being",
"used",
":",
"param",
"the",
"length",
"of",
"the",
"decoded",
"array",
... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/utils/codec_utils.py#L18-L27 | train |
def convert_bytes_to_ints(in_bytes, num):
    """Unpack a byte string into a list of integers, ``num`` bytes per integer.

    :param in_bytes: the input bytes
    :param num: the number of bytes per int
    :return: the integer list
    """
    fmt = mmtf.utils.constants.NUM_DICT[num]
    # Trailing bytes that do not fill a whole integer are ignored.
    usable = (len(in_bytes) // num) * num
    return [struct.unpack(fmt, in_bytes[offset:offset + num])[0]
            for offset in range(0, usable, num)]
"""Convert a byte array into an integer array. The number of bytes forming an integer
is defined by num
:param in_bytes: the input bytes
:param num: the number of bytes per int
:return the integer array"""
out_arr = []
for i in range(len(in_bytes)//num):
val = in_bytes[i * num:i * num + num]
unpacked = struct.unpack(mmtf.utils.constants.NUM_DICT[num], val)
out_arr.append(unpacked[0])
return out_arr | [
"def",
"convert_bytes_to_ints",
"(",
"in_bytes",
",",
"num",
")",
":",
"out_arr",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"in_bytes",
")",
"//",
"num",
")",
":",
"val",
"=",
"in_bytes",
"[",
"i",
"*",
"num",
":",
"i",
"*",
"num... | Convert a byte array into an integer array. The number of bytes forming an integer
is defined by num
:param in_bytes: the input bytes
:param num: the number of bytes per int
:return the integer array | [
"Convert",
"a",
"byte",
"array",
"into",
"an",
"integer",
"array",
".",
"The",
"number",
"of",
"bytes",
"forming",
"an",
"integer",
"is",
"defined",
"by",
"num",
":",
"param",
"in_bytes",
":",
"the",
"input",
"bytes",
":",
"param",
"num",
":",
"the",
"... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/converters/converters.py#L9-L20 | train |
def convert_ints_to_bytes(in_ints, num):
    """Pack an integer sequence into a byte string, ``num`` bytes per integer.

    :param in_ints: the input integers
    :param num: the number of bytes per int
    :return: the packed byte string
    """
    fmt = mmtf.utils.constants.NUM_DICT[num]
    return b"".join(struct.pack(fmt, value) for value in in_ints)
"""Convert an integer array into a byte arrays. The number of bytes forming an integer
is defined by num
:param in_ints: the input integers
:param num: the number of bytes per int
:return the integer array"""
out_bytes= b""
for val in in_ints:
out_bytes+=struct.pack(mmtf.utils.constants.NUM_DICT[num], val)
return out_bytes | [
"def",
"convert_ints_to_bytes",
"(",
"in_ints",
",",
"num",
")",
":",
"out_bytes",
"=",
"b\"\"",
"for",
"val",
"in",
"in_ints",
":",
"out_bytes",
"+=",
"struct",
".",
"pack",
"(",
"mmtf",
".",
"utils",
".",
"constants",
".",
"NUM_DICT",
"[",
"num",
"]",
... | Convert an integer array into a byte arrays. The number of bytes forming an integer
is defined by num
:param in_ints: the input integers
:param num: the number of bytes per int
:return the integer array | [
"Convert",
"an",
"integer",
"array",
"into",
"a",
"byte",
"arrays",
".",
"The",
"number",
"of",
"bytes",
"forming",
"an",
"integer",
"is",
"defined",
"by",
"num"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/converters/converters.py#L22-L32 | train |
def decode_chain_list(in_bytes):
    """Convert a byte string into a list of fixed-width chain name strings.

    Each string occupies ``mmtf.utils.constants.CHAIN_LEN`` bytes; padding
    null characters are stripped from the decoded result.

    :param in_bytes: the input bytes
    :return: the decoded list of strings
    """
    width = mmtf.utils.constants.CHAIN_LEN
    null_char = mmtf.utils.constants.NULL_BYTE
    usable = (len(in_bytes) // width) * width
    return [in_bytes[start:start + width].decode("ascii").strip(null_char)
            for start in range(0, usable, width)]
"""Convert a list of bytes to a list of strings. Each string is of length mmtf.CHAIN_LEN
:param in_bytes: the input bytes
:return the decoded list of strings"""
tot_strings = len(in_bytes) // mmtf.utils.constants.CHAIN_LEN
out_strings = []
for i in range(tot_strings):
out_s = in_bytes[i * mmtf.utils.constants.CHAIN_LEN:i * mmtf.utils.constants.CHAIN_LEN + mmtf.utils.constants.CHAIN_LEN]
out_strings.append(out_s.decode("ascii").strip(mmtf.utils.constants.NULL_BYTE))
return out_strings | [
"def",
"decode_chain_list",
"(",
"in_bytes",
")",
":",
"tot_strings",
"=",
"len",
"(",
"in_bytes",
")",
"//",
"mmtf",
".",
"utils",
".",
"constants",
".",
"CHAIN_LEN",
"out_strings",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"tot_strings",
")",
":",... | Convert a list of bytes to a list of strings. Each string is of length mmtf.CHAIN_LEN
:param in_bytes: the input bytes
:return the decoded list of strings | [
"Convert",
"a",
"list",
"of",
"bytes",
"to",
"a",
"list",
"of",
"strings",
".",
"Each",
"string",
"is",
"of",
"length",
"mmtf",
".",
"CHAIN_LEN"
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/converters/converters.py#L34-L44 | train |
def encode_chain_list(in_strings):
    """Convert a list of strings to a fixed-width byte string.

    Each string is ASCII-encoded and padded with null bytes up to
    ``mmtf.utils.constants.CHAIN_LEN``.

    :param in_strings: the input strings
    :return: the encoded byte string
    """
    width = mmtf.utils.constants.CHAIN_LEN
    pad = mmtf.utils.constants.NULL_BYTE.encode('ascii')
    encoded = b""
    for name in in_strings:
        # Multiplying by a non-positive count yields b"", matching the
        # original's behaviour for names at or beyond the field width.
        encoded += name.encode('ascii') + pad * (width - len(name))
    return encoded
"""Convert a list of strings to a list of byte arrays.
:param in_strings: the input strings
:return the encoded list of byte arrays"""
out_bytes = b""
for in_s in in_strings:
out_bytes+=in_s.encode('ascii')
for i in range(mmtf.utils.constants.CHAIN_LEN -len(in_s)):
out_bytes+= mmtf.utils.constants.NULL_BYTE.encode('ascii')
return out_bytes | [
"def",
"encode_chain_list",
"(",
"in_strings",
")",
":",
"out_bytes",
"=",
"b\"\"",
"for",
"in_s",
"in",
"in_strings",
":",
"out_bytes",
"+=",
"in_s",
".",
"encode",
"(",
"'ascii'",
")",
"for",
"i",
"in",
"range",
"(",
"mmtf",
".",
"utils",
".",
"constan... | Convert a list of strings to a list of byte arrays.
:param in_strings: the input strings
:return the encoded list of byte arrays | [
"Convert",
"a",
"list",
"of",
"strings",
"to",
"a",
"list",
"of",
"byte",
"arrays",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/converters/converters.py#L47-L57 | train |
def recursive_index_encode(int_array, max=32767, min=-32768):
    """Pack an integer array using recursive indexing.

    A value outside [min, max] is emitted as a run of saturated sentinel
    values (max or min) followed by the remainder.

    :param int_array: the input array of integers
    :param max: the maximum integer size
    :param min: the minimum integer size
    :return: the array of integers after recursive index encoding
    """
    encoded = []
    step_down = int(math.fabs(min))
    for value in int_array:
        if value >= 0:
            while value >= max:
                encoded.append(max)
                value -= max
        else:
            while value <= min:
                encoded.append(min)
                value += step_down
        encoded.append(value)
    return encoded
"""Pack an integer array using recursive indexing.
:param int_array: the input array of integers
:param max: the maximum integer size
:param min: the minimum integer size
:return the array of integers after recursive index encoding"""
out_arr = []
for curr in int_array:
if curr >= 0 :
while curr >= max:
out_arr.append(max)
curr -= max
else:
while curr <= min:
out_arr.append(min)
curr += int(math.fabs(min))
out_arr.append(curr)
return out_arr | [
"def",
"recursive_index_encode",
"(",
"int_array",
",",
"max",
"=",
"32767",
",",
"min",
"=",
"-",
"32768",
")",
":",
"out_arr",
"=",
"[",
"]",
"for",
"curr",
"in",
"int_array",
":",
"if",
"curr",
">=",
"0",
":",
"while",
"curr",
">=",
"max",
":",
... | Pack an integer array using recursive indexing.
:param int_array: the input array of integers
:param max: the maximum integer size
:param min: the minimum integer size
:return the array of integers after recursive index encoding | [
"Pack",
"an",
"integer",
"array",
"using",
"recursive",
"indexing",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/converters/converters.py#L90-L108 | train |
def recursive_index_decode(int_array, max=32767, min=-32768):
    """Unpack an array of integers using recursive indexing.

    Runs of saturated sentinel values (max or min) are summed together with
    the terminating remainder to recover each original value.

    :param int_array: the input array of integers
    :param max: the maximum integer size
    :param min: the minimum integer size
    :return: the array of integers after recursive index decoding
    """
    decoded = []
    pos = 0
    size = len(int_array)
    while pos < size:
        total = 0
        current = int_array[pos]
        # Accumulate the sentinel run; a literal zero terminates it early.
        while current in (max, min):
            total += current
            pos += 1
            current = int_array[pos]
            if current == 0:
                break
        decoded.append(total + current)
        pos += 1
    return decoded
"""Unpack an array of integers using recursive indexing.
:param int_array: the input array of integers
:param max: the maximum integer size
:param min: the minimum integer size
:return the array of integers after recursive index decoding"""
out_arr = []
encoded_ind = 0
while encoded_ind < len(int_array):
decoded_val = 0
while int_array[encoded_ind]==max or int_array[encoded_ind]==min:
decoded_val += int_array[encoded_ind]
encoded_ind+=1
if int_array[encoded_ind]==0:
break
decoded_val += int_array[encoded_ind]
encoded_ind+=1
out_arr.append(decoded_val)
return out_arr | [
"def",
"recursive_index_decode",
"(",
"int_array",
",",
"max",
"=",
"32767",
",",
"min",
"=",
"-",
"32768",
")",
":",
"out_arr",
"=",
"[",
"]",
"encoded_ind",
"=",
"0",
"while",
"encoded_ind",
"<",
"len",
"(",
"int_array",
")",
":",
"decoded_val",
"=",
... | Unpack an array of integers using recursive indexing.
:param int_array: the input array of integers
:param max: the maximum integer size
:param min: the minimum integer size
:return the array of integers after recursive index decoding | [
"Unpack",
"an",
"array",
"of",
"integers",
"using",
"recursive",
"indexing",
".",
":",
"param",
"int_array",
":",
"the",
"input",
"array",
"of",
"integers",
":",
"param",
"max",
":",
"the",
"maximum",
"integer",
"size",
":",
"param",
"min",
":",
"the",
"... | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/converters/converters.py#L110-L128 | train |
def run_length_decode(in_array):
    """Run-length decode an integer array.

    The input alternates (value, count) pairs; each value is repeated
    count times in the output.

    :param in_array: the input numpy array of integers
    :return: the decoded array as numpy int32
    """
    flat = in_array.tolist()
    expanded = []
    # Pair values with their run lengths; a trailing unpaired value is
    # dropped, matching the original toggle-based loop.
    for value, count in zip(flat[::2], flat[1::2]):
        expanded.extend([value] * int(count))
    return numpy.asarray(expanded, dtype=numpy.int32)
"""A function to run length decode an int array.
:param in_array: the input array of integers
:return the decoded array"""
switch=False
out_array=[]
in_array = in_array.tolist()
for item in in_array:
if switch==False:
this_item = item
switch=True
else:
switch=False
out_array.extend([this_item]*int(item))
return numpy.asarray(out_array, dtype=numpy.int32) | [
"def",
"run_length_decode",
"(",
"in_array",
")",
":",
"switch",
"=",
"False",
"out_array",
"=",
"[",
"]",
"in_array",
"=",
"in_array",
".",
"tolist",
"(",
")",
"for",
"item",
"in",
"in_array",
":",
"if",
"switch",
"==",
"False",
":",
"this_item",
"=",
... | A function to run length decode an int array.
:param in_array: the input array of integers
:return the decoded array | [
"A",
"function",
"to",
"run",
"length",
"decode",
"an",
"int",
"array",
"."
] | 899bb877ca1b32a9396803d38c5bf38a2520754e | https://github.com/rcsb/mmtf-python/blob/899bb877ca1b32a9396803d38c5bf38a2520754e/mmtf/codecs/decoders/numpy_decoders.py#L11-L26 | train |
def build(algo, init):
    '''Build and return an optimizer for the rosenbrock function.

    In downhill, an optimizer can be constructed using the build() top-level
    function. This function requires several Theano quantities such as the loss
    being optimized and the parameters to update during optimization.
    '''
    x = theano.shared(np.array(init, FLOAT), name='x')
    noise = 0.1 * RandomStreams().normal((len(init) - 1, ))
    monitors = []
    if len(init) == 2:
        # this gives us access to the x and y locations during optimization.
        monitors.append(('x', x[:-1].sum()))
        monitors.append(('y', x[1:].sum()))
    loss = (noise + 100 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2).sum()
    return downhill.build(algo,
                          loss=loss,
                          params=[x],
                          monitors=monitors,
                          monitor_gradients=True)
'''Build and return an optimizer for the rosenbrock function.
In downhill, an optimizer can be constructed using the build() top-level
function. This function requires several Theano quantities such as the loss
being optimized and the parameters to update during optimization.
'''
x = theano.shared(np.array(init, FLOAT), name='x')
n = 0.1 * RandomStreams().normal((len(init) - 1, ))
monitors = []
if len(init) == 2:
# this gives us access to the x and y locations during optimization.
monitors.extend([('x', x[:-1].sum()), ('y', x[1:].sum())])
return downhill.build(
algo,
loss=(n + 100 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2).sum(),
params=[x],
monitors=monitors,
monitor_gradients=True) | [
"def",
"build",
"(",
"algo",
",",
"init",
")",
":",
"x",
"=",
"theano",
".",
"shared",
"(",
"np",
".",
"array",
"(",
"init",
",",
"FLOAT",
")",
",",
"name",
"=",
"'x'",
")",
"n",
"=",
"0.1",
"*",
"RandomStreams",
"(",
")",
".",
"normal",
"(",
... | Build and return an optimizer for the rosenbrock function.
In downhill, an optimizer can be constructed using the build() top-level
function. This function requires several Theano quantities such as the loss
being optimized and the parameters to update during optimization. | [
"Build",
"and",
"return",
"an",
"optimizer",
"for",
"the",
"rosenbrock",
"function",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/examples/rosenbrock.py#L15-L33 | train |
def build_and_trace(algo, init, limit=100, **kwargs):
    '''Run an optimizer on the rosenbrock function. Return xs, ys, and losses.

    In downhill, optimization algorithms can be iterated over to progressively
    minimize the loss. At each iteration, the optimizer yields a dictionary of
    monitor values that were computed during that iteration. Here we build an
    optimizer and then run it for a fixed number of iterations.
    '''
    kw = dict(min_improvement=0, patience=0, max_gradient_norm=100)
    kw.update(kwargs)
    xs, ys, loss = [], [], []
    for tm, _ in build(algo, init).iterate([[]], **kw):
        if len(init) == 2:
            xs.append(tm['x'])
            ys.append(tm['y'])
        loss.append(tm['loss'])
        if len(loss) == limit:
            break
    # Return the optimization up to any failure of patience. All three traces
    # are trimmed consistently; the original returned the scalar loss[-9],
    # which contradicted the docstring's plural "losses" and the xs/ys slices.
    return xs[:-9], ys[:-9], loss[:-9]
'''Run an optimizer on the rosenbrock function. Return xs, ys, and losses.
In downhill, optimization algorithms can be iterated over to progressively
minimize the loss. At each iteration, the optimizer yields a dictionary of
monitor values that were computed during that iteration. Here we build an
optimizer and then run it for a fixed number of iterations.
'''
kw = dict(min_improvement=0, patience=0, max_gradient_norm=100)
kw.update(kwargs)
xs, ys, loss = [], [], []
for tm, _ in build(algo, init).iterate([[]], **kw):
if len(init) == 2:
xs.append(tm['x'])
ys.append(tm['y'])
loss.append(tm['loss'])
if len(loss) == limit:
break
# Return the optimization up to any failure of patience.
return xs[:-9], ys[:-9], loss[-9] | [
"def",
"build_and_trace",
"(",
"algo",
",",
"init",
",",
"limit",
"=",
"100",
",",
"*",
"*",
"kwargs",
")",
":",
"kw",
"=",
"dict",
"(",
"min_improvement",
"=",
"0",
",",
"patience",
"=",
"0",
",",
"max_gradient_norm",
"=",
"100",
")",
"kw",
".",
"... | Run an optimizer on the rosenbrock function. Return xs, ys, and losses.
In downhill, optimization algorithms can be iterated over to progressively
minimize the loss. At each iteration, the optimizer yields a dictionary of
monitor values that were computed during that iteration. Here we build an
optimizer and then run it for a fixed number of iterations. | [
"Run",
"an",
"optimizer",
"on",
"the",
"rosenbrock",
"function",
".",
"Return",
"xs",
"ys",
"and",
"losses",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/examples/rosenbrock.py#L36-L55 | train |
def minimize(loss, train, valid=None, params=None, inputs=None, algo='rmsprop',
             updates=(), monitors=(), monitor_gradients=False, batch_size=32,
             train_batches=None, valid_batches=None, **kwargs):
    '''Minimize a loss function with respect to some symbolic parameters.

    Additional keyword arguments are passed to the underlying
    :class:`Optimizer <downhill.base.Optimizer>` instance.

    Parameters
    ----------
    loss : Theano expression
        Loss function to minimize. This must be a scalar-valued expression.
    train : :class:`Dataset <downhill.dataset.Dataset>`, ndarray, or callable
        Dataset to use for computing gradient updates.
    valid : :class:`Dataset <downhill.dataset.Dataset>`, ndarray, or callable, optional
        Dataset to use for validating the minimization process. The training
        dataset is used if this is not provided.
    params : list of Theano variables, optional
        Symbolic variables to adjust to minimize the loss. If not given, these
        will be computed automatically by walking the computation graph.
    inputs : list of Theano variables, optional
        Symbolic variables required to compute the loss. If not given, these
        will be computed automatically by walking the computation graph.
    algo : str, optional
        Name of the minimization algorithm to use. Must be one of the strings
        that can be passed to :func:`build`. Defaults to ``'rmsprop'``.
    updates : list of update pairs, optional
        A list of pairs providing updates for the internals of the loss
        computation, e.g. an internal random number generator.
    monitors : dict or sequence of (str, Theano expression) tuples, optional
        Additional values to monitor during optimization, given either as
        (name, expression) tuples or as a dict of named expressions.
    monitor_gradients : bool, optional
        If True, add monitors to log the norms of the parameter gradients
        during optimization. Defaults to False.
    batch_size : int, optional
        Size of batches provided by datasets. Defaults to 32.
    train_batches : int, optional
        Number of training batches per optimization pass. Defaults to None,
        which uses the entire training dataset.
    valid_batches : int, optional
        Number of validation batches per validation pass. Defaults to None,
        which uses the entire validation dataset.

    Returns
    -------
    train_monitors : dict
        Monitor names mapped to values evaluated on the training dataset;
        always contains the ``'loss'`` key.
    valid_monitors : dict
        Monitor names mapped to values evaluated on the validation dataset;
        always contains the ``'loss'`` key. Because validation is not run
        after every update, these values may be "stale", but they are always
        the most recently computed ones.
    '''
    def _as_dataset(data, name, iteration_size):
        # Wrap raw arrays/callables in a Dataset; pass Dataset instances through.
        if isinstance(data, Dataset):
            return data
        return Dataset(data, name=name, batch_size=batch_size,
                       iteration_size=iteration_size)

    train = _as_dataset(train, 'train', train_batches)
    if valid is not None:
        valid = _as_dataset(valid, 'valid', valid_batches)
    optimizer = build(algo,
                      loss=loss,
                      params=params,
                      inputs=inputs,
                      updates=updates,
                      monitors=monitors,
                      monitor_gradients=monitor_gradients)
    return optimizer.minimize(train, valid, **kwargs)
updates=(), monitors=(), monitor_gradients=False, batch_size=32,
train_batches=None, valid_batches=None, **kwargs):
'''Minimize a loss function with respect to some symbolic parameters.
Additional keyword arguments are passed to the underlying :class:`Optimizer
<downhill.base.Optimizer>` instance.
Parameters
----------
loss : Theano expression
Loss function to minimize. This must be a scalar-valued expression.
train : :class:`Dataset <downhill.dataset.Dataset>`, ndarray, or callable
Dataset to use for computing gradient updates.
valid : :class:`Dataset <downhill.dataset.Dataset>`, ndarray, or callable, optional
Dataset to use for validating the minimization process. The training
dataset is used if this is not provided.
params : list of Theano variables, optional
Symbolic variables to adjust to minimize the loss. If not given, these
will be computed automatically by walking the computation graph.
inputs : list of Theano variables, optional
Symbolic variables required to compute the loss. If not given, these
will be computed automatically by walking the computation graph.
algo : str, optional
Name of the minimization algorithm to use. Must be one of the strings
that can be passed to :func:`build`. Defaults to ``'rmsprop'``.
updates : list of update pairs, optional
A list of pairs providing updates for the internal of the loss
computation. Normally this is empty, but it can be provided if the loss,
for example, requires an update to an internal random number generator.
monitors : dict or sequence of (str, Theano expression) tuples, optional
Additional values to monitor during optimization. These must be provided
as either a sequence of (name, expression) tuples, or as a dictionary
mapping string names to Theano expressions.
monitor_gradients : bool, optional
If True, add monitors to log the norms of the parameter gradients during
optimization. Defaults to False.
batch_size : int, optional
Size of batches provided by datasets. Defaults to 32.
train_batches : int, optional
Number of batches of training data to iterate over during one pass of
optimization. Defaults to None, which uses the entire training dataset.
valid_batches : int, optional
Number of batches of validation data to iterate over during one pass of
validation. Defaults to None, which uses the entire validation dataset.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to monitor values. This dictionary
will always contain the ``'loss'`` key, giving the value of the loss
evaluated on the training dataset.
valid_monitors : dict
A dictionary mapping monitor names to monitor values, evaluated on the
validation dataset. This dictionary will always contain the ``'loss'``
key, giving the value of the loss function. Because validation is not
always computed after every optimization update, these monitor values
may be "stale"; however, they will always contain the most recently
computed values.
'''
if not isinstance(train, Dataset):
train = Dataset(
train,
name='train',
batch_size=batch_size,
iteration_size=train_batches,
)
if valid is not None and not isinstance(valid, Dataset):
valid = Dataset(
valid,
name='valid',
batch_size=batch_size,
iteration_size=valid_batches,
)
return build(
algo,
loss=loss,
params=params,
inputs=inputs,
updates=updates,
monitors=monitors,
monitor_gradients=monitor_gradients,
).minimize(train, valid, **kwargs) | [
"def",
"minimize",
"(",
"loss",
",",
"train",
",",
"valid",
"=",
"None",
",",
"params",
"=",
"None",
",",
"inputs",
"=",
"None",
",",
"algo",
"=",
"'rmsprop'",
",",
"updates",
"=",
"(",
")",
",",
"monitors",
"=",
"(",
")",
",",
"monitor_gradients",
... | Minimize a loss function with respect to some symbolic parameters.
Additional keyword arguments are passed to the underlying :class:`Optimizer
<downhill.base.Optimizer>` instance.
Parameters
----------
loss : Theano expression
Loss function to minimize. This must be a scalar-valued expression.
train : :class:`Dataset <downhill.dataset.Dataset>`, ndarray, or callable
Dataset to use for computing gradient updates.
valid : :class:`Dataset <downhill.dataset.Dataset>`, ndarray, or callable, optional
Dataset to use for validating the minimization process. The training
dataset is used if this is not provided.
params : list of Theano variables, optional
Symbolic variables to adjust to minimize the loss. If not given, these
will be computed automatically by walking the computation graph.
inputs : list of Theano variables, optional
Symbolic variables required to compute the loss. If not given, these
will be computed automatically by walking the computation graph.
algo : str, optional
Name of the minimization algorithm to use. Must be one of the strings
that can be passed to :func:`build`. Defaults to ``'rmsprop'``.
updates : list of update pairs, optional
A list of pairs providing updates for the internal of the loss
computation. Normally this is empty, but it can be provided if the loss,
for example, requires an update to an internal random number generator.
monitors : dict or sequence of (str, Theano expression) tuples, optional
Additional values to monitor during optimization. These must be provided
as either a sequence of (name, expression) tuples, or as a dictionary
mapping string names to Theano expressions.
monitor_gradients : bool, optional
If True, add monitors to log the norms of the parameter gradients during
optimization. Defaults to False.
batch_size : int, optional
Size of batches provided by datasets. Defaults to 32.
train_batches : int, optional
Number of batches of training data to iterate over during one pass of
optimization. Defaults to None, which uses the entire training dataset.
valid_batches : int, optional
Number of batches of validation data to iterate over during one pass of
validation. Defaults to None, which uses the entire validation dataset.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to monitor values. This dictionary
will always contain the ``'loss'`` key, giving the value of the loss
evaluated on the training dataset.
valid_monitors : dict
A dictionary mapping monitor names to monitor values, evaluated on the
validation dataset. This dictionary will always contain the ``'loss'``
key, giving the value of the loss function. Because validation is not
always computed after every optimization update, these monitor values
may be "stale"; however, they will always contain the most recently
computed values. | [
"Minimize",
"a",
"loss",
"function",
"with",
"respect",
"to",
"some",
"symbolic",
"parameters",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/__init__.py#L9-L91 | train |
lmjohns3/downhill | examples/rosenbrock-2d.py | make_label | def make_label(loss, key):
'''Create a legend label for an optimization run.'''
algo, rate, mu, half, reg = key
slots, args = ['{:.3f}', '{}', 'm={:.3f}'], [loss, algo, mu]
if algo in 'SGD NAG RMSProp Adam ESGD'.split():
slots.append('lr={:.2e}')
args.append(rate)
if algo in 'RMSProp ADADELTA ESGD'.split():
slots.append('rmsh={}')
args.append(half)
slots.append('rmsr={:.2e}')
args.append(reg)
return ' '.join(slots).format(*args) | python | def make_label(loss, key):
'''Create a legend label for an optimization run.'''
algo, rate, mu, half, reg = key
slots, args = ['{:.3f}', '{}', 'm={:.3f}'], [loss, algo, mu]
if algo in 'SGD NAG RMSProp Adam ESGD'.split():
slots.append('lr={:.2e}')
args.append(rate)
if algo in 'RMSProp ADADELTA ESGD'.split():
slots.append('rmsh={}')
args.append(half)
slots.append('rmsr={:.2e}')
args.append(reg)
return ' '.join(slots).format(*args) | [
"def",
"make_label",
"(",
"loss",
",",
"key",
")",
":",
"algo",
",",
"rate",
",",
"mu",
",",
"half",
",",
"reg",
"=",
"key",
"slots",
",",
"args",
"=",
"[",
"'{:.3f}'",
",",
"'{}'",
",",
"'m={:.3f}'",
"]",
",",
"[",
"loss",
",",
"algo",
",",
"m... | Create a legend label for an optimization run. | [
"Create",
"a",
"legend",
"label",
"for",
"an",
"optimization",
"run",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/examples/rosenbrock-2d.py#L25-L37 | train |
lmjohns3/downhill | downhill/dataset.py | Dataset.iterate | def iterate(self, shuffle=True):
'''Iterate over batches in the dataset.
This method generates ``iteration_size`` batches from the dataset and
then returns.
Parameters
----------
shuffle : bool, optional
Shuffle the batches in this dataset if the iteration reaches the end
of the batch list. Defaults to True.
Yields
------
batches : data batches
A sequence of batches---often from a training, validation, or test
dataset.
'''
for _ in range(self.iteration_size):
if self._callable is not None:
yield self._callable()
else:
yield self._next_batch(shuffle) | python | def iterate(self, shuffle=True):
'''Iterate over batches in the dataset.
This method generates ``iteration_size`` batches from the dataset and
then returns.
Parameters
----------
shuffle : bool, optional
Shuffle the batches in this dataset if the iteration reaches the end
of the batch list. Defaults to True.
Yields
------
batches : data batches
A sequence of batches---often from a training, validation, or test
dataset.
'''
for _ in range(self.iteration_size):
if self._callable is not None:
yield self._callable()
else:
yield self._next_batch(shuffle) | [
"def",
"iterate",
"(",
"self",
",",
"shuffle",
"=",
"True",
")",
":",
"for",
"_",
"in",
"range",
"(",
"self",
".",
"iteration_size",
")",
":",
"if",
"self",
".",
"_callable",
"is",
"not",
"None",
":",
"yield",
"self",
".",
"_callable",
"(",
")",
"e... | Iterate over batches in the dataset.
This method generates ``iteration_size`` batches from the dataset and
then returns.
Parameters
----------
shuffle : bool, optional
Shuffle the batches in this dataset if the iteration reaches the end
of the batch list. Defaults to True.
Yields
------
batches : data batches
A sequence of batches---often from a training, validation, or test
dataset. | [
"Iterate",
"over",
"batches",
"in",
"the",
"dataset",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/dataset.py#L183-L205 | train |
lmjohns3/downhill | downhill/util.py | shared_like | def shared_like(param, suffix, init=0):
'''Create a Theano shared variable like an existing parameter.
Parameters
----------
param : Theano variable
Theano variable to use for shape information.
suffix : str
Suffix to append to the parameter's name for the new variable.
init : float or ndarray, optional
Initial value of the shared variable. Defaults to 0.
Returns
-------
shared : Theano shared variable
A new shared variable with the same shape and data type as ``param``.
'''
return theano.shared(np.zeros_like(param.get_value()) + init,
name='{}_{}'.format(param.name, suffix),
broadcastable=param.broadcastable) | python | def shared_like(param, suffix, init=0):
'''Create a Theano shared variable like an existing parameter.
Parameters
----------
param : Theano variable
Theano variable to use for shape information.
suffix : str
Suffix to append to the parameter's name for the new variable.
init : float or ndarray, optional
Initial value of the shared variable. Defaults to 0.
Returns
-------
shared : Theano shared variable
A new shared variable with the same shape and data type as ``param``.
'''
return theano.shared(np.zeros_like(param.get_value()) + init,
name='{}_{}'.format(param.name, suffix),
broadcastable=param.broadcastable) | [
"def",
"shared_like",
"(",
"param",
",",
"suffix",
",",
"init",
"=",
"0",
")",
":",
"return",
"theano",
".",
"shared",
"(",
"np",
".",
"zeros_like",
"(",
"param",
".",
"get_value",
"(",
")",
")",
"+",
"init",
",",
"name",
"=",
"'{}_{}'",
".",
"form... | Create a Theano shared variable like an existing parameter.
Parameters
----------
param : Theano variable
Theano variable to use for shape information.
suffix : str
Suffix to append to the parameter's name for the new variable.
init : float or ndarray, optional
Initial value of the shared variable. Defaults to 0.
Returns
-------
shared : Theano shared variable
A new shared variable with the same shape and data type as ``param``. | [
"Create",
"a",
"Theano",
"shared",
"variable",
"like",
"an",
"existing",
"parameter",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/util.py#L30-L49 | train |
lmjohns3/downhill | downhill/util.py | find_inputs_and_params | def find_inputs_and_params(node):
'''Walk a computation graph and extract root variables.
Parameters
----------
node : Theano expression
A symbolic Theano expression to walk.
Returns
-------
inputs : list Theano variables
A list of candidate inputs for this graph. Inputs are nodes in the graph
with no parents that are not shared and are not constants.
params : list of Theano shared variables
A list of candidate parameters for this graph. Parameters are nodes in
the graph that are shared variables.
'''
queue, seen, inputs, params = [node], set(), set(), set()
while queue:
node = queue.pop()
seen.add(node)
queue.extend(p for p in node.get_parents() if p not in seen)
if not node.get_parents():
if isinstance(node, theano.compile.SharedVariable):
params.add(node)
elif not isinstance(node, TT.Constant):
inputs.add(node)
return list(inputs), list(params) | python | def find_inputs_and_params(node):
'''Walk a computation graph and extract root variables.
Parameters
----------
node : Theano expression
A symbolic Theano expression to walk.
Returns
-------
inputs : list Theano variables
A list of candidate inputs for this graph. Inputs are nodes in the graph
with no parents that are not shared and are not constants.
params : list of Theano shared variables
A list of candidate parameters for this graph. Parameters are nodes in
the graph that are shared variables.
'''
queue, seen, inputs, params = [node], set(), set(), set()
while queue:
node = queue.pop()
seen.add(node)
queue.extend(p for p in node.get_parents() if p not in seen)
if not node.get_parents():
if isinstance(node, theano.compile.SharedVariable):
params.add(node)
elif not isinstance(node, TT.Constant):
inputs.add(node)
return list(inputs), list(params) | [
"def",
"find_inputs_and_params",
"(",
"node",
")",
":",
"queue",
",",
"seen",
",",
"inputs",
",",
"params",
"=",
"[",
"node",
"]",
",",
"set",
"(",
")",
",",
"set",
"(",
")",
",",
"set",
"(",
")",
"while",
"queue",
":",
"node",
"=",
"queue",
".",... | Walk a computation graph and extract root variables.
Parameters
----------
node : Theano expression
A symbolic Theano expression to walk.
Returns
-------
inputs : list Theano variables
A list of candidate inputs for this graph. Inputs are nodes in the graph
with no parents that are not shared and are not constants.
params : list of Theano shared variables
A list of candidate parameters for this graph. Parameters are nodes in
the graph that are shared variables. | [
"Walk",
"a",
"computation",
"graph",
"and",
"extract",
"root",
"variables",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/util.py#L68-L95 | train |
lmjohns3/downhill | downhill/util.py | log | def log(msg, *args, **kwargs):
'''Log a message to the console.
Parameters
----------
msg : str
A string to display on the console. This can contain {}-style
formatting commands; the remaining positional and keyword arguments
will be used to fill them in.
'''
now = datetime.datetime.now()
module = 'downhill'
if _detailed_callsite:
caller = inspect.stack()[1]
parts = caller.filename.replace('.py', '').split('/')
module = '{}:{}'.format(
'.'.join(parts[parts.index('downhill')+1:]), caller.lineno)
click.echo(' '.join((
click.style(now.strftime('%Y%m%d'), fg='blue'),
click.style(now.strftime('%H%M%S'), fg='cyan'),
click.style(module, fg='magenta'),
msg.format(*args, **kwargs),
))) | python | def log(msg, *args, **kwargs):
'''Log a message to the console.
Parameters
----------
msg : str
A string to display on the console. This can contain {}-style
formatting commands; the remaining positional and keyword arguments
will be used to fill them in.
'''
now = datetime.datetime.now()
module = 'downhill'
if _detailed_callsite:
caller = inspect.stack()[1]
parts = caller.filename.replace('.py', '').split('/')
module = '{}:{}'.format(
'.'.join(parts[parts.index('downhill')+1:]), caller.lineno)
click.echo(' '.join((
click.style(now.strftime('%Y%m%d'), fg='blue'),
click.style(now.strftime('%H%M%S'), fg='cyan'),
click.style(module, fg='magenta'),
msg.format(*args, **kwargs),
))) | [
"def",
"log",
"(",
"msg",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"now",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"module",
"=",
"'downhill'",
"if",
"_detailed_callsite",
":",
"caller",
"=",
"inspect",
".",
"stack",
"(",
... | Log a message to the console.
Parameters
----------
msg : str
A string to display on the console. This can contain {}-style
formatting commands; the remaining positional and keyword arguments
will be used to fill them in. | [
"Log",
"a",
"message",
"to",
"the",
"console",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/util.py#L107-L129 | train |
lmjohns3/downhill | downhill/util.py | log_param | def log_param(name, value):
'''Log a parameter value to the console.
Parameters
----------
name : str
Name of the parameter being logged.
value : any
Value of the parameter being logged.
'''
log('setting {} = {}', click.style(str(name)),
click.style(str(value), fg='yellow')) | python | def log_param(name, value):
'''Log a parameter value to the console.
Parameters
----------
name : str
Name of the parameter being logged.
value : any
Value of the parameter being logged.
'''
log('setting {} = {}', click.style(str(name)),
click.style(str(value), fg='yellow')) | [
"def",
"log_param",
"(",
"name",
",",
"value",
")",
":",
"log",
"(",
"'setting {} = {}'",
",",
"click",
".",
"style",
"(",
"str",
"(",
"name",
")",
")",
",",
"click",
".",
"style",
"(",
"str",
"(",
"value",
")",
",",
"fg",
"=",
"'yellow'",
")",
"... | Log a parameter value to the console.
Parameters
----------
name : str
Name of the parameter being logged.
value : any
Value of the parameter being logged. | [
"Log",
"a",
"parameter",
"value",
"to",
"the",
"console",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/util.py#L132-L143 | train |
lmjohns3/downhill | examples/mnist-sparse-factorization.py | load_mnist | def load_mnist():
'''Load the MNIST digits dataset.'''
mnist = skdata.mnist.dataset.MNIST()
mnist.meta # trigger download if needed.
def arr(n, dtype):
arr = mnist.arrays[n]
return arr.reshape((len(arr), -1)).astype(dtype)
train_images = arr('train_images', np.float32) / 128 - 1
train_labels = arr('train_labels', np.uint8)
return ((train_images[:50000], train_labels[:50000, 0]),
(train_images[50000:], train_labels[50000:, 0])) | python | def load_mnist():
'''Load the MNIST digits dataset.'''
mnist = skdata.mnist.dataset.MNIST()
mnist.meta # trigger download if needed.
def arr(n, dtype):
arr = mnist.arrays[n]
return arr.reshape((len(arr), -1)).astype(dtype)
train_images = arr('train_images', np.float32) / 128 - 1
train_labels = arr('train_labels', np.uint8)
return ((train_images[:50000], train_labels[:50000, 0]),
(train_images[50000:], train_labels[50000:, 0])) | [
"def",
"load_mnist",
"(",
")",
":",
"mnist",
"=",
"skdata",
".",
"mnist",
".",
"dataset",
".",
"MNIST",
"(",
")",
"mnist",
".",
"meta",
"# trigger download if needed.",
"def",
"arr",
"(",
"n",
",",
"dtype",
")",
":",
"arr",
"=",
"mnist",
".",
"arrays",... | Load the MNIST digits dataset. | [
"Load",
"the",
"MNIST",
"digits",
"dataset",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/examples/mnist-sparse-factorization.py#L11-L22 | train |
lmjohns3/downhill | downhill/base.py | build | def build(algo, loss, params=None, inputs=None, updates=(), monitors=(),
monitor_gradients=False):
'''Construct an optimizer by name.
Parameters
----------
algo : str
The name of the optimization algorithm to build.
loss : Theano expression
Loss function to minimize. This must be a scalar-valued expression.
params : list of Theano variables, optional
Symbolic variables to adjust to minimize the loss. If not given, these
will be computed automatically by walking the computation graph.
inputs : list of Theano variables, optional
Symbolic variables required to compute the loss. If not given, these
will be computed automatically by walking the computation graph.
updates : list of update pairs, optional
A list of pairs providing updates for the internal of the loss
computation. Normally this is empty, but it can be provided if the loss,
for example, requires an update to an internal random number generator.
monitors : dict or sequence of (str, Theano expression) tuples, optional
Additional values to monitor during optimization. These must be provided
as either a sequence of (name, expression) tuples, or as a dictionary
mapping string names to Theano expressions.
monitor_gradients : bool, optional
If True, add monitors to log the norms of the parameter gradients during
optimization. Defaults to False.
Returns
-------
optimizer : :class:`Optimizer`
An optimizer instance.
'''
return Optimizer.build(algo, loss, params, inputs,
updates=updates, monitors=monitors,
monitor_gradients=monitor_gradients) | python | def build(algo, loss, params=None, inputs=None, updates=(), monitors=(),
monitor_gradients=False):
'''Construct an optimizer by name.
Parameters
----------
algo : str
The name of the optimization algorithm to build.
loss : Theano expression
Loss function to minimize. This must be a scalar-valued expression.
params : list of Theano variables, optional
Symbolic variables to adjust to minimize the loss. If not given, these
will be computed automatically by walking the computation graph.
inputs : list of Theano variables, optional
Symbolic variables required to compute the loss. If not given, these
will be computed automatically by walking the computation graph.
updates : list of update pairs, optional
A list of pairs providing updates for the internal of the loss
computation. Normally this is empty, but it can be provided if the loss,
for example, requires an update to an internal random number generator.
monitors : dict or sequence of (str, Theano expression) tuples, optional
Additional values to monitor during optimization. These must be provided
as either a sequence of (name, expression) tuples, or as a dictionary
mapping string names to Theano expressions.
monitor_gradients : bool, optional
If True, add monitors to log the norms of the parameter gradients during
optimization. Defaults to False.
Returns
-------
optimizer : :class:`Optimizer`
An optimizer instance.
'''
return Optimizer.build(algo, loss, params, inputs,
updates=updates, monitors=monitors,
monitor_gradients=monitor_gradients) | [
"def",
"build",
"(",
"algo",
",",
"loss",
",",
"params",
"=",
"None",
",",
"inputs",
"=",
"None",
",",
"updates",
"=",
"(",
")",
",",
"monitors",
"=",
"(",
")",
",",
"monitor_gradients",
"=",
"False",
")",
":",
"return",
"Optimizer",
".",
"build",
... | Construct an optimizer by name.
Parameters
----------
algo : str
The name of the optimization algorithm to build.
loss : Theano expression
Loss function to minimize. This must be a scalar-valued expression.
params : list of Theano variables, optional
Symbolic variables to adjust to minimize the loss. If not given, these
will be computed automatically by walking the computation graph.
inputs : list of Theano variables, optional
Symbolic variables required to compute the loss. If not given, these
will be computed automatically by walking the computation graph.
updates : list of update pairs, optional
A list of pairs providing updates for the internal of the loss
computation. Normally this is empty, but it can be provided if the loss,
for example, requires an update to an internal random number generator.
monitors : dict or sequence of (str, Theano expression) tuples, optional
Additional values to monitor during optimization. These must be provided
as either a sequence of (name, expression) tuples, or as a dictionary
mapping string names to Theano expressions.
monitor_gradients : bool, optional
If True, add monitors to log the norms of the parameter gradients during
optimization. Defaults to False.
Returns
-------
optimizer : :class:`Optimizer`
An optimizer instance. | [
"Construct",
"an",
"optimizer",
"by",
"name",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L15-L50 | train |
lmjohns3/downhill | downhill/base.py | Optimizer._compile | def _compile(self, **kwargs):
'''Compile the Theano functions for evaluating and updating our model.
'''
util.log('compiling evaluation function')
self.f_eval = theano.function(self._inputs,
self._monitor_exprs,
updates=self._updates,
name='evaluation')
label = self.__class__.__name__
util.log('compiling {} optimizer'.format(click.style(label, fg='red')))
updates = list(self._updates) + list(self.get_updates(**kwargs))
self.f_step = theano.function(self._inputs,
self._monitor_exprs,
updates=updates,
name=label) | python | def _compile(self, **kwargs):
'''Compile the Theano functions for evaluating and updating our model.
'''
util.log('compiling evaluation function')
self.f_eval = theano.function(self._inputs,
self._monitor_exprs,
updates=self._updates,
name='evaluation')
label = self.__class__.__name__
util.log('compiling {} optimizer'.format(click.style(label, fg='red')))
updates = list(self._updates) + list(self.get_updates(**kwargs))
self.f_step = theano.function(self._inputs,
self._monitor_exprs,
updates=updates,
name=label) | [
"def",
"_compile",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"util",
".",
"log",
"(",
"'compiling evaluation function'",
")",
"self",
".",
"f_eval",
"=",
"theano",
".",
"function",
"(",
"self",
".",
"_inputs",
",",
"self",
".",
"_monitor_exprs",
",... | Compile the Theano functions for evaluating and updating our model. | [
"Compile",
"the",
"Theano",
"functions",
"for",
"evaluating",
"and",
"updating",
"our",
"model",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L153-L167 | train |
lmjohns3/downhill | downhill/base.py | Optimizer.get_updates | def get_updates(self, **kwargs):
'''Get parameter update expressions for performing optimization.
Keyword arguments can be applied here to set any of the global
optimizer attributes.
Yields
------
updates : (parameter, expression) tuples
A sequence of parameter updates to be applied during optimization.
'''
self._prepare(**kwargs)
for param, grad in self._differentiate():
for var, update in self._get_updates_for(param, grad):
# For auxiliary variables, updates are meant to replace the
# existing variable value.
if var != param:
yield var, update
continue
# If momentum is disabled, just apply the parameter delta.
if self.momentum == 0:
yield var, param - update
continue
# Momentum is enabled, so we keep track of velocity here.
vel_tm1 = util.shared_like(param, 'vel')
vel_t = util.as_float(self.momentum) * vel_tm1 - update
if self.nesterov:
# see http://arxiv.org/pdf/1212.0901v2.pdf (eq 7) and
# https://github.com/lisa-lab/pylearn2/pull/136#issuecomment-10381617
mom_sqr = util.as_float(self.momentum ** 2)
mom_inc = util.as_float(1 + self.momentum)
vel_t = mom_sqr * vel_tm1 - mom_inc * update
yield vel_tm1, vel_t
yield param, param + vel_t | python | def get_updates(self, **kwargs):
'''Get parameter update expressions for performing optimization.
Keyword arguments can be applied here to set any of the global
optimizer attributes.
Yields
------
updates : (parameter, expression) tuples
A sequence of parameter updates to be applied during optimization.
'''
self._prepare(**kwargs)
for param, grad in self._differentiate():
for var, update in self._get_updates_for(param, grad):
# For auxiliary variables, updates are meant to replace the
# existing variable value.
if var != param:
yield var, update
continue
# If momentum is disabled, just apply the parameter delta.
if self.momentum == 0:
yield var, param - update
continue
# Momentum is enabled, so we keep track of velocity here.
vel_tm1 = util.shared_like(param, 'vel')
vel_t = util.as_float(self.momentum) * vel_tm1 - update
if self.nesterov:
# see http://arxiv.org/pdf/1212.0901v2.pdf (eq 7) and
# https://github.com/lisa-lab/pylearn2/pull/136#issuecomment-10381617
mom_sqr = util.as_float(self.momentum ** 2)
mom_inc = util.as_float(1 + self.momentum)
vel_t = mom_sqr * vel_tm1 - mom_inc * update
yield vel_tm1, vel_t
yield param, param + vel_t | [
"def",
"get_updates",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_prepare",
"(",
"*",
"*",
"kwargs",
")",
"for",
"param",
",",
"grad",
"in",
"self",
".",
"_differentiate",
"(",
")",
":",
"for",
"var",
",",
"update",
"in",
"self",... | Get parameter update expressions for performing optimization.
Keyword arguments can be applied here to set any of the global
optimizer attributes.
Yields
------
updates : (parameter, expression) tuples
A sequence of parameter updates to be applied during optimization. | [
"Get",
"parameter",
"update",
"expressions",
"for",
"performing",
"optimization",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L169-L202 | train |
lmjohns3/downhill | downhill/base.py | Optimizer._differentiate | def _differentiate(self, params=None):
'''Return a sequence of gradients for our parameters.
If this optimizer has been configured with a gradient norm limit, or
with elementwise gradient clipping, this method applies the appropriate
rescaling and clipping operations before returning the gradient.
Parameters
----------
params : list of Theano variables, optional
Return the gradient with respect to these parameters. Defaults to
all parameters that the optimizer knows about.
Yields
------
pairs : (param, grad) tuples
Generates a sequence of tuples representing each of the parameters
requested and the corresponding Theano gradient expressions.
'''
if params is None:
params = self._params
for param, grad in zip(params, TT.grad(self._loss, params)):
if self.max_gradient_elem > 0:
limit = util.as_float(self.max_gradient_elem)
yield param, TT.clip(grad, -limit, limit)
elif self.max_gradient_norm > 0:
norm = TT.sqrt((grad * grad).sum())
limit = util.as_float(self.max_gradient_norm)
yield param, grad * TT.minimum(1, limit / norm)
else:
yield param, grad | python | def _differentiate(self, params=None):
'''Return a sequence of gradients for our parameters.
If this optimizer has been configured with a gradient norm limit, or
with elementwise gradient clipping, this method applies the appropriate
rescaling and clipping operations before returning the gradient.
Parameters
----------
params : list of Theano variables, optional
Return the gradient with respect to these parameters. Defaults to
all parameters that the optimizer knows about.
Yields
------
pairs : (param, grad) tuples
Generates a sequence of tuples representing each of the parameters
requested and the corresponding Theano gradient expressions.
'''
if params is None:
params = self._params
for param, grad in zip(params, TT.grad(self._loss, params)):
if self.max_gradient_elem > 0:
limit = util.as_float(self.max_gradient_elem)
yield param, TT.clip(grad, -limit, limit)
elif self.max_gradient_norm > 0:
norm = TT.sqrt((grad * grad).sum())
limit = util.as_float(self.max_gradient_norm)
yield param, grad * TT.minimum(1, limit / norm)
else:
yield param, grad | [
"def",
"_differentiate",
"(",
"self",
",",
"params",
"=",
"None",
")",
":",
"if",
"params",
"is",
"None",
":",
"params",
"=",
"self",
".",
"_params",
"for",
"param",
",",
"grad",
"in",
"zip",
"(",
"params",
",",
"TT",
".",
"grad",
"(",
"self",
".",... | Return a sequence of gradients for our parameters.
If this optimizer has been configured with a gradient norm limit, or
with elementwise gradient clipping, this method applies the appropriate
rescaling and clipping operations before returning the gradient.
Parameters
----------
params : list of Theano variables, optional
Return the gradient with respect to these parameters. Defaults to
all parameters that the optimizer knows about.
Yields
------
pairs : (param, grad) tuples
Generates a sequence of tuples representing each of the parameters
requested and the corresponding Theano gradient expressions. | [
"Return",
"a",
"sequence",
"of",
"gradients",
"for",
"our",
"parameters",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L214-L244 | train |
lmjohns3/downhill | downhill/base.py | Optimizer.set_params | def set_params(self, targets=None):
'''Set the values of the parameters to the given target values.
Parameters
----------
targets : sequence of ndarray, optional
Arrays for setting the parameters of our model. If this is not
provided, the current best parameters for this optimizer will be
used.
'''
if not isinstance(targets, (list, tuple)):
targets = self._best_params
for param, target in zip(self._params, targets):
param.set_value(target) | python | def set_params(self, targets=None):
'''Set the values of the parameters to the given target values.
Parameters
----------
targets : sequence of ndarray, optional
Arrays for setting the parameters of our model. If this is not
provided, the current best parameters for this optimizer will be
used.
'''
if not isinstance(targets, (list, tuple)):
targets = self._best_params
for param, target in zip(self._params, targets):
param.set_value(target) | [
"def",
"set_params",
"(",
"self",
",",
"targets",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"targets",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"targets",
"=",
"self",
".",
"_best_params",
"for",
"param",
",",
"target",
"in",
"zip"... | Set the values of the parameters to the given target values.
Parameters
----------
targets : sequence of ndarray, optional
Arrays for setting the parameters of our model. If this is not
provided, the current best parameters for this optimizer will be
used. | [
"Set",
"the",
"values",
"of",
"the",
"parameters",
"to",
"the",
"given",
"target",
"values",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L246-L259 | train |
lmjohns3/downhill | downhill/base.py | Optimizer._log | def _log(self, monitors, iteration, label='', suffix=''):
'''Log the state of the optimizer on the console.
Parameters
----------
monitors : OrderedDict
A dictionary of monitor names mapped to values. These names and
values are what is being logged.
iteration : int
Optimization iteration that we are logging.
label : str, optional
A label for the name of the optimizer creating the log line.
Defaults to the name of the current class.
suffix : str, optional
A suffix to add to the end of the log line, if any.
'''
label = label or self.__class__.__name__
fields = (('{}={:.6f}').format(k, v) for k, v in monitors.items())
util.log('{} {} {}{}'.format(label, iteration, ' '.join(fields), suffix)) | python | def _log(self, monitors, iteration, label='', suffix=''):
'''Log the state of the optimizer on the console.
Parameters
----------
monitors : OrderedDict
A dictionary of monitor names mapped to values. These names and
values are what is being logged.
iteration : int
Optimization iteration that we are logging.
label : str, optional
A label for the name of the optimizer creating the log line.
Defaults to the name of the current class.
suffix : str, optional
A suffix to add to the end of the log line, if any.
'''
label = label or self.__class__.__name__
fields = (('{}={:.6f}').format(k, v) for k, v in monitors.items())
util.log('{} {} {}{}'.format(label, iteration, ' '.join(fields), suffix)) | [
"def",
"_log",
"(",
"self",
",",
"monitors",
",",
"iteration",
",",
"label",
"=",
"''",
",",
"suffix",
"=",
"''",
")",
":",
"label",
"=",
"label",
"or",
"self",
".",
"__class__",
".",
"__name__",
"fields",
"=",
"(",
"(",
"'{}={:.6f}'",
")",
".",
"f... | Log the state of the optimizer on the console.
Parameters
----------
monitors : OrderedDict
A dictionary of monitor names mapped to values. These names and
values are what is being logged.
iteration : int
Optimization iteration that we are logging.
label : str, optional
A label for the name of the optimizer creating the log line.
Defaults to the name of the current class.
suffix : str, optional
A suffix to add to the end of the log line, if any. | [
"Log",
"the",
"state",
"of",
"the",
"optimizer",
"on",
"the",
"console",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L261-L279 | train |
lmjohns3/downhill | downhill/base.py | Optimizer.evaluate | def evaluate(self, dataset):
'''Evaluate the current model parameters on a dataset.
Parameters
----------
dataset : :class:`Dataset <downhill.dataset.Dataset>`
A set of data to use for evaluating the model.
Returns
-------
monitors : OrderedDict
A dictionary mapping monitor names to values. Monitors are
quantities of interest during optimization---for example, loss
function, accuracy, or whatever the optimization task requires.
'''
if dataset is None:
values = [self.f_eval()]
else:
values = [self.f_eval(*x) for x in dataset]
monitors = zip(self._monitor_names, np.mean(values, axis=0))
return collections.OrderedDict(monitors) | python | def evaluate(self, dataset):
'''Evaluate the current model parameters on a dataset.
Parameters
----------
dataset : :class:`Dataset <downhill.dataset.Dataset>`
A set of data to use for evaluating the model.
Returns
-------
monitors : OrderedDict
A dictionary mapping monitor names to values. Monitors are
quantities of interest during optimization---for example, loss
function, accuracy, or whatever the optimization task requires.
'''
if dataset is None:
values = [self.f_eval()]
else:
values = [self.f_eval(*x) for x in dataset]
monitors = zip(self._monitor_names, np.mean(values, axis=0))
return collections.OrderedDict(monitors) | [
"def",
"evaluate",
"(",
"self",
",",
"dataset",
")",
":",
"if",
"dataset",
"is",
"None",
":",
"values",
"=",
"[",
"self",
".",
"f_eval",
"(",
")",
"]",
"else",
":",
"values",
"=",
"[",
"self",
".",
"f_eval",
"(",
"*",
"x",
")",
"for",
"x",
"in"... | Evaluate the current model parameters on a dataset.
Parameters
----------
dataset : :class:`Dataset <downhill.dataset.Dataset>`
A set of data to use for evaluating the model.
Returns
-------
monitors : OrderedDict
A dictionary mapping monitor names to values. Monitors are
quantities of interest during optimization---for example, loss
function, accuracy, or whatever the optimization task requires. | [
"Evaluate",
"the",
"current",
"model",
"parameters",
"on",
"a",
"dataset",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L281-L301 | train |
lmjohns3/downhill | downhill/base.py | Optimizer._prepare | def _prepare(self, **kwargs):
'''Set up properties for optimization.
This method can be overridden by base classes to provide parameters that
are specific to a particular optimization technique (e.g., setting up a
learning rate value).
'''
self.learning_rate = util.as_float(kwargs.pop('learning_rate', 1e-4))
self.momentum = kwargs.pop('momentum', 0)
self.nesterov = kwargs.pop('nesterov', False)
self.patience = kwargs.get('patience', 5)
self.validate_every = kwargs.pop('validate_every', 10)
self.min_improvement = kwargs.pop('min_improvement', 0)
self.max_gradient_norm = kwargs.pop('max_gradient_norm', 0)
self.max_gradient_elem = kwargs.pop('max_gradient_elem', 0)
util.log_param('patience', self.patience)
util.log_param('validate_every', self.validate_every)
util.log_param('min_improvement', self.min_improvement)
util.log_param('max_gradient_norm', self.max_gradient_norm)
util.log_param('max_gradient_elem', self.max_gradient_elem)
util.log_param('learning_rate', self.learning_rate)
util.log_param('momentum', self.momentum)
util.log_param('nesterov', self.nesterov) | python | def _prepare(self, **kwargs):
'''Set up properties for optimization.
This method can be overridden by base classes to provide parameters that
are specific to a particular optimization technique (e.g., setting up a
learning rate value).
'''
self.learning_rate = util.as_float(kwargs.pop('learning_rate', 1e-4))
self.momentum = kwargs.pop('momentum', 0)
self.nesterov = kwargs.pop('nesterov', False)
self.patience = kwargs.get('patience', 5)
self.validate_every = kwargs.pop('validate_every', 10)
self.min_improvement = kwargs.pop('min_improvement', 0)
self.max_gradient_norm = kwargs.pop('max_gradient_norm', 0)
self.max_gradient_elem = kwargs.pop('max_gradient_elem', 0)
util.log_param('patience', self.patience)
util.log_param('validate_every', self.validate_every)
util.log_param('min_improvement', self.min_improvement)
util.log_param('max_gradient_norm', self.max_gradient_norm)
util.log_param('max_gradient_elem', self.max_gradient_elem)
util.log_param('learning_rate', self.learning_rate)
util.log_param('momentum', self.momentum)
util.log_param('nesterov', self.nesterov) | [
"def",
"_prepare",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"learning_rate",
"=",
"util",
".",
"as_float",
"(",
"kwargs",
".",
"pop",
"(",
"'learning_rate'",
",",
"1e-4",
")",
")",
"self",
".",
"momentum",
"=",
"kwargs",
".",
"pop... | Set up properties for optimization.
This method can be overridden by base classes to provide parameters that
are specific to a particular optimization technique (e.g., setting up a
learning rate value). | [
"Set",
"up",
"properties",
"for",
"optimization",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L329-L352 | train |
lmjohns3/downhill | downhill/base.py | Optimizer.iterate | def iterate(self, train=None, valid=None, max_updates=None, **kwargs):
r'''Optimize a loss iteratively using a training and validation dataset.
This method yields a series of monitor values to the caller. After every
optimization epoch, a pair of monitor dictionaries is generated: one
evaluated on the training dataset during the epoch, and another
evaluated on the validation dataset at the most recent validation epoch.
The validation monitors might not be updated during every optimization
iteration; in this case, the most recent validation monitors will be
yielded along with the training monitors.
Additional keyword arguments supplied here will set the global
optimizer attributes.
Parameters
----------
train : sequence or :class:`Dataset <downhill.dataset.Dataset>`
A set of training data for computing updates to model parameters.
valid : sequence or :class:`Dataset <downhill.dataset.Dataset>`
A set of validation data for computing monitor values and
determining when the loss has stopped improving. Defaults to the
training data.
max_updates : int, optional
If specified, halt optimization after this many gradient updates
have been processed. If not provided, uses early stopping to decide
when to halt.
Yields
------
train_monitors : dict
A dictionary mapping monitor names to values, evaluated on the
training dataset.
valid_monitors : dict
A dictionary containing monitor values evaluated on the validation
dataset.
'''
self._compile(**kwargs)
if valid is None:
valid = train
iteration = 0
training = validation = None
while max_updates is None or iteration < max_updates:
if not iteration % self.validate_every:
try:
validation = self.evaluate(valid)
except KeyboardInterrupt:
util.log('interrupted!')
break
if self._test_patience(validation):
util.log('patience elapsed!')
break
try:
training = self._step(train)
except KeyboardInterrupt:
util.log('interrupted!')
break
iteration += 1
self._log(training, iteration)
yield training, validation
self.set_params('best') | python | def iterate(self, train=None, valid=None, max_updates=None, **kwargs):
r'''Optimize a loss iteratively using a training and validation dataset.
This method yields a series of monitor values to the caller. After every
optimization epoch, a pair of monitor dictionaries is generated: one
evaluated on the training dataset during the epoch, and another
evaluated on the validation dataset at the most recent validation epoch.
The validation monitors might not be updated during every optimization
iteration; in this case, the most recent validation monitors will be
yielded along with the training monitors.
Additional keyword arguments supplied here will set the global
optimizer attributes.
Parameters
----------
train : sequence or :class:`Dataset <downhill.dataset.Dataset>`
A set of training data for computing updates to model parameters.
valid : sequence or :class:`Dataset <downhill.dataset.Dataset>`
A set of validation data for computing monitor values and
determining when the loss has stopped improving. Defaults to the
training data.
max_updates : int, optional
If specified, halt optimization after this many gradient updates
have been processed. If not provided, uses early stopping to decide
when to halt.
Yields
------
train_monitors : dict
A dictionary mapping monitor names to values, evaluated on the
training dataset.
valid_monitors : dict
A dictionary containing monitor values evaluated on the validation
dataset.
'''
self._compile(**kwargs)
if valid is None:
valid = train
iteration = 0
training = validation = None
while max_updates is None or iteration < max_updates:
if not iteration % self.validate_every:
try:
validation = self.evaluate(valid)
except KeyboardInterrupt:
util.log('interrupted!')
break
if self._test_patience(validation):
util.log('patience elapsed!')
break
try:
training = self._step(train)
except KeyboardInterrupt:
util.log('interrupted!')
break
iteration += 1
self._log(training, iteration)
yield training, validation
self.set_params('best') | [
"def",
"iterate",
"(",
"self",
",",
"train",
"=",
"None",
",",
"valid",
"=",
"None",
",",
"max_updates",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_compile",
"(",
"*",
"*",
"kwargs",
")",
"if",
"valid",
"is",
"None",
":",
"val... | r'''Optimize a loss iteratively using a training and validation dataset.
This method yields a series of monitor values to the caller. After every
optimization epoch, a pair of monitor dictionaries is generated: one
evaluated on the training dataset during the epoch, and another
evaluated on the validation dataset at the most recent validation epoch.
The validation monitors might not be updated during every optimization
iteration; in this case, the most recent validation monitors will be
yielded along with the training monitors.
Additional keyword arguments supplied here will set the global
optimizer attributes.
Parameters
----------
train : sequence or :class:`Dataset <downhill.dataset.Dataset>`
A set of training data for computing updates to model parameters.
valid : sequence or :class:`Dataset <downhill.dataset.Dataset>`
A set of validation data for computing monitor values and
determining when the loss has stopped improving. Defaults to the
training data.
max_updates : int, optional
If specified, halt optimization after this many gradient updates
have been processed. If not provided, uses early stopping to decide
when to halt.
Yields
------
train_monitors : dict
A dictionary mapping monitor names to values, evaluated on the
training dataset.
valid_monitors : dict
A dictionary containing monitor values evaluated on the validation
dataset. | [
"r",
"Optimize",
"a",
"loss",
"iteratively",
"using",
"a",
"training",
"and",
"validation",
"dataset",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L354-L415 | train |
lmjohns3/downhill | downhill/base.py | Optimizer.minimize | def minimize(self, *args, **kwargs):
'''Optimize our loss exhaustively.
This method is a thin wrapper over the :func:`iterate` method. It simply
exhausts the iterative optimization process and returns the final
monitor values.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to values, evaluated on the
training dataset.
valid_monitors : dict
A dictionary containing monitor values evaluated on the validation
dataset.
'''
monitors = None
for monitors in self.iterate(*args, **kwargs):
pass
return monitors | python | def minimize(self, *args, **kwargs):
'''Optimize our loss exhaustively.
This method is a thin wrapper over the :func:`iterate` method. It simply
exhausts the iterative optimization process and returns the final
monitor values.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to values, evaluated on the
training dataset.
valid_monitors : dict
A dictionary containing monitor values evaluated on the validation
dataset.
'''
monitors = None
for monitors in self.iterate(*args, **kwargs):
pass
return monitors | [
"def",
"minimize",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"monitors",
"=",
"None",
"for",
"monitors",
"in",
"self",
".",
"iterate",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"pass",
"return",
"monitors"
] | Optimize our loss exhaustively.
This method is a thin wrapper over the :func:`iterate` method. It simply
exhausts the iterative optimization process and returns the final
monitor values.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to values, evaluated on the
training dataset.
valid_monitors : dict
A dictionary containing monitor values evaluated on the validation
dataset. | [
"Optimize",
"our",
"loss",
"exhaustively",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L417-L436 | train |
lmjohns3/downhill | downhill/base.py | Optimizer._step | def _step(self, dataset):
'''Advance the state of the optimizer by one step.
Parameters
----------
dataset : :class:`Dataset <downhill.dataset.Dataset>`
A dataset for optimizing the model.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to values.
'''
if dataset is None:
values = [self.f_step()]
else:
values = [self.f_step(*x) for x in dataset]
return collections.OrderedDict(
zip(self._monitor_names, np.mean(values, axis=0))) | python | def _step(self, dataset):
'''Advance the state of the optimizer by one step.
Parameters
----------
dataset : :class:`Dataset <downhill.dataset.Dataset>`
A dataset for optimizing the model.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to values.
'''
if dataset is None:
values = [self.f_step()]
else:
values = [self.f_step(*x) for x in dataset]
return collections.OrderedDict(
zip(self._monitor_names, np.mean(values, axis=0))) | [
"def",
"_step",
"(",
"self",
",",
"dataset",
")",
":",
"if",
"dataset",
"is",
"None",
":",
"values",
"=",
"[",
"self",
".",
"f_step",
"(",
")",
"]",
"else",
":",
"values",
"=",
"[",
"self",
".",
"f_step",
"(",
"*",
"x",
")",
"for",
"x",
"in",
... | Advance the state of the optimizer by one step.
Parameters
----------
dataset : :class:`Dataset <downhill.dataset.Dataset>`
A dataset for optimizing the model.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to values. | [
"Advance",
"the",
"state",
"of",
"the",
"optimizer",
"by",
"one",
"step",
"."
] | 42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d | https://github.com/lmjohns3/downhill/blob/42111ab03b5e6fa47b7bf7c7cb5caa402f10ce6d/downhill/base.py#L438-L456 | train |
timothycrosley/connectable | connectable/base.py | accept_arguments | def accept_arguments(method, number_of_arguments=1):
"""Returns True if the given method will accept the given number of arguments
method: the method to perform introspection on
number_of_arguments: the number_of_arguments
"""
if 'method' in method.__class__.__name__:
number_of_arguments += 1
func = getattr(method, 'im_func', getattr(method, '__func__'))
func_defaults = getattr(func, 'func_defaults', getattr(func, '__defaults__'))
number_of_defaults = func_defaults and len(func_defaults) or 0
elif method.__class__.__name__ == 'function':
func_defaults = getattr(method, 'func_defaults', getattr(method, '__defaults__'))
number_of_defaults = func_defaults and len(func_defaults) or 0
coArgCount = getattr(method, 'func_code', getattr(method, '__code__')).co_argcount
if(coArgCount >= number_of_arguments and coArgCount - number_of_defaults <= number_of_arguments):
return True
return False | python | def accept_arguments(method, number_of_arguments=1):
"""Returns True if the given method will accept the given number of arguments
method: the method to perform introspection on
number_of_arguments: the number_of_arguments
"""
if 'method' in method.__class__.__name__:
number_of_arguments += 1
func = getattr(method, 'im_func', getattr(method, '__func__'))
func_defaults = getattr(func, 'func_defaults', getattr(func, '__defaults__'))
number_of_defaults = func_defaults and len(func_defaults) or 0
elif method.__class__.__name__ == 'function':
func_defaults = getattr(method, 'func_defaults', getattr(method, '__defaults__'))
number_of_defaults = func_defaults and len(func_defaults) or 0
coArgCount = getattr(method, 'func_code', getattr(method, '__code__')).co_argcount
if(coArgCount >= number_of_arguments and coArgCount - number_of_defaults <= number_of_arguments):
return True
return False | [
"def",
"accept_arguments",
"(",
"method",
",",
"number_of_arguments",
"=",
"1",
")",
":",
"if",
"'method'",
"in",
"method",
".",
"__class__",
".",
"__name__",
":",
"number_of_arguments",
"+=",
"1",
"func",
"=",
"getattr",
"(",
"method",
",",
"'im_func'",
","... | Returns True if the given method will accept the given number of arguments
method: the method to perform introspection on
number_of_arguments: the number_of_arguments | [
"Returns",
"True",
"if",
"the",
"given",
"method",
"will",
"accept",
"the",
"given",
"number",
"of",
"arguments"
] | d5958d974c04b16f410c602786809d0e2a6665d2 | https://github.com/timothycrosley/connectable/blob/d5958d974c04b16f410c602786809d0e2a6665d2/connectable/base.py#L98-L117 | train |
timothycrosley/connectable | connectable/base.py | Connectable.emit | def emit(self, signal, value=None, gather=False):
"""Emits a signal, causing all slot methods connected with the signal to be called (optionally w/ related value)
signal: the name of the signal to emit, must be defined in the classes 'signals' list.
value: the value to pass to all connected slot methods.
gather: if set, causes emit to return a list of all slot results
"""
results = [] if gather else True
if hasattr(self, 'connections') and signal in self.connections:
for condition, values in self.connections[signal].items():
if condition is None or condition == value or (callable(condition) and condition(value)):
for slot, transform in values.items():
if transform is not None:
if callable(transform):
used_value = transform(value)
elif isinstance(transform, str):
used_value = transform.format(value=value)
else:
used_value = transform
else:
used_value = value
if used_value is not None:
if(accept_arguments(slot, 1)):
result = slot(used_value)
elif(accept_arguments(slot, 0)):
result = slot()
else:
result = ''
else:
result = slot()
if gather:
results.append(result)
return results | python | def emit(self, signal, value=None, gather=False):
"""Emits a signal, causing all slot methods connected with the signal to be called (optionally w/ related value)
signal: the name of the signal to emit, must be defined in the classes 'signals' list.
value: the value to pass to all connected slot methods.
gather: if set, causes emit to return a list of all slot results
"""
results = [] if gather else True
if hasattr(self, 'connections') and signal in self.connections:
for condition, values in self.connections[signal].items():
if condition is None or condition == value or (callable(condition) and condition(value)):
for slot, transform in values.items():
if transform is not None:
if callable(transform):
used_value = transform(value)
elif isinstance(transform, str):
used_value = transform.format(value=value)
else:
used_value = transform
else:
used_value = value
if used_value is not None:
if(accept_arguments(slot, 1)):
result = slot(used_value)
elif(accept_arguments(slot, 0)):
result = slot()
else:
result = ''
else:
result = slot()
if gather:
results.append(result)
return results | [
"def",
"emit",
"(",
"self",
",",
"signal",
",",
"value",
"=",
"None",
",",
"gather",
"=",
"False",
")",
":",
"results",
"=",
"[",
"]",
"if",
"gather",
"else",
"True",
"if",
"hasattr",
"(",
"self",
",",
"'connections'",
")",
"and",
"signal",
"in",
"... | Emits a signal, causing all slot methods connected with the signal to be called (optionally w/ related value)
signal: the name of the signal to emit, must be defined in the classes 'signals' list.
value: the value to pass to all connected slot methods.
gather: if set, causes emit to return a list of all slot results | [
"Emits",
"a",
"signal",
"causing",
"all",
"slot",
"methods",
"connected",
"with",
"the",
"signal",
"to",
"be",
"called",
"(",
"optionally",
"w",
"/",
"related",
"value",
")"
] | d5958d974c04b16f410c602786809d0e2a6665d2 | https://github.com/timothycrosley/connectable/blob/d5958d974c04b16f410c602786809d0e2a6665d2/connectable/base.py#L22-L57 | train |
timothycrosley/connectable | connectable/base.py | Connectable.connect | def connect(self, signal, slot, transform=None, condition=None):
"""Defines a connection between this objects signal and another objects slot
signal: the signal this class will emit, to cause the slot method to be called
receiver: the object containing the slot method to be called
slot: the slot method to call
transform: an optional value override to pass into the slot method as the first variable
condition: only call the slot if the value emitted matches the required value or calling required returns True
"""
if not signal in self.signals:
print("WARNING: {0} is trying to connect a slot to an undefined signal: {1}".format(self.__class__.__name__,
str(signal)))
return
if not hasattr(self, 'connections'):
self.connections = {}
connection = self.connections.setdefault(signal, {})
connection = connection.setdefault(condition, {})
connection[slot] = transform | python | def connect(self, signal, slot, transform=None, condition=None):
"""Defines a connection between this objects signal and another objects slot
signal: the signal this class will emit, to cause the slot method to be called
receiver: the object containing the slot method to be called
slot: the slot method to call
transform: an optional value override to pass into the slot method as the first variable
condition: only call the slot if the value emitted matches the required value or calling required returns True
"""
if not signal in self.signals:
print("WARNING: {0} is trying to connect a slot to an undefined signal: {1}".format(self.__class__.__name__,
str(signal)))
return
if not hasattr(self, 'connections'):
self.connections = {}
connection = self.connections.setdefault(signal, {})
connection = connection.setdefault(condition, {})
connection[slot] = transform | [
"def",
"connect",
"(",
"self",
",",
"signal",
",",
"slot",
",",
"transform",
"=",
"None",
",",
"condition",
"=",
"None",
")",
":",
"if",
"not",
"signal",
"in",
"self",
".",
"signals",
":",
"print",
"(",
"\"WARNING: {0} is trying to connect a slot to an undefin... | Defines a connection between this objects signal and another objects slot
signal: the signal this class will emit, to cause the slot method to be called
receiver: the object containing the slot method to be called
slot: the slot method to call
transform: an optional value override to pass into the slot method as the first variable
condition: only call the slot if the value emitted matches the required value or calling required returns True | [
"Defines",
"a",
"connection",
"between",
"this",
"objects",
"signal",
"and",
"another",
"objects",
"slot"
] | d5958d974c04b16f410c602786809d0e2a6665d2 | https://github.com/timothycrosley/connectable/blob/d5958d974c04b16f410c602786809d0e2a6665d2/connectable/base.py#L59-L77 | train |
timothycrosley/connectable | connectable/base.py | Connectable.disconnect | def disconnect(self, signal=None, slot=None, transform=None, condition=None):
"""Removes connection(s) between this objects signal and connected slot(s)
signal: the signal this class will emit, to cause the slot method to be called
receiver: the object containing the slot method to be called
slot: the slot method or function to call
transform: an optional value override to pass into the slot method as the first variable
condition: only call the slot method if the value emitted matches this condition
"""
if slot:
self.connections[signal][condition].pop(slot, None)
elif condition is not None:
self.connections[signal].pop(condition, None)
elif signal:
self.connections.pop(signal, None)
else:
delattr(self, 'connections') | python | def disconnect(self, signal=None, slot=None, transform=None, condition=None):
"""Removes connection(s) between this objects signal and connected slot(s)
signal: the signal this class will emit, to cause the slot method to be called
receiver: the object containing the slot method to be called
slot: the slot method or function to call
transform: an optional value override to pass into the slot method as the first variable
condition: only call the slot method if the value emitted matches this condition
"""
if slot:
self.connections[signal][condition].pop(slot, None)
elif condition is not None:
self.connections[signal].pop(condition, None)
elif signal:
self.connections.pop(signal, None)
else:
delattr(self, 'connections') | [
"def",
"disconnect",
"(",
"self",
",",
"signal",
"=",
"None",
",",
"slot",
"=",
"None",
",",
"transform",
"=",
"None",
",",
"condition",
"=",
"None",
")",
":",
"if",
"slot",
":",
"self",
".",
"connections",
"[",
"signal",
"]",
"[",
"condition",
"]",
... | Removes connection(s) between this objects signal and connected slot(s)
signal: the signal this class will emit, to cause the slot method to be called
receiver: the object containing the slot method to be called
slot: the slot method or function to call
transform: an optional value override to pass into the slot method as the first variable
condition: only call the slot method if the value emitted matches this condition | [
"Removes",
"connection",
"(",
"s",
")",
"between",
"this",
"objects",
"signal",
"and",
"connected",
"slot",
"(",
"s",
")"
] | d5958d974c04b16f410c602786809d0e2a6665d2 | https://github.com/timothycrosley/connectable/blob/d5958d974c04b16f410c602786809d0e2a6665d2/connectable/base.py#L79-L95 | train |
Lagg/steamodd | steam/sim.py | inventory_context.get | def get(self, key):
""" Returns context data for a given app, can be an ID or a case insensitive name """
keystr = str(key)
res = None
try:
res = self.ctx[keystr]
except KeyError:
for k, v in self.ctx.items():
if "name" in v and v["name"].lower() == keystr.lower():
res = v
break
return res | python | def get(self, key):
""" Returns context data for a given app, can be an ID or a case insensitive name """
keystr = str(key)
res = None
try:
res = self.ctx[keystr]
except KeyError:
for k, v in self.ctx.items():
if "name" in v and v["name"].lower() == keystr.lower():
res = v
break
return res | [
"def",
"get",
"(",
"self",
",",
"key",
")",
":",
"keystr",
"=",
"str",
"(",
"key",
")",
"res",
"=",
"None",
"try",
":",
"res",
"=",
"self",
".",
"ctx",
"[",
"keystr",
"]",
"except",
"KeyError",
":",
"for",
"k",
",",
"v",
"in",
"self",
".",
"c... | Returns context data for a given app, can be an ID or a case insensitive name | [
"Returns",
"context",
"data",
"for",
"a",
"given",
"app",
"can",
"be",
"an",
"ID",
"or",
"a",
"case",
"insensitive",
"name"
] | 2e9ced4e7a6dbe3e09d5a648450bafc12b937b95 | https://github.com/Lagg/steamodd/blob/2e9ced4e7a6dbe3e09d5a648450bafc12b937b95/steam/sim.py#L35-L48 | train |
Lagg/steamodd | steam/sim.py | item.hash_name | def hash_name(self):
""" The URL-friendly identifier for the item. Generates its own approximation if one isn't available """
name = self._item.get("market_hash_name")
if not name:
name = "{0.appid}-{0.name}".format(self)
return name | python | def hash_name(self):
""" The URL-friendly identifier for the item. Generates its own approximation if one isn't available """
name = self._item.get("market_hash_name")
if not name:
name = "{0.appid}-{0.name}".format(self)
return name | [
"def",
"hash_name",
"(",
"self",
")",
":",
"name",
"=",
"self",
".",
"_item",
".",
"get",
"(",
"\"market_hash_name\"",
")",
"if",
"not",
"name",
":",
"name",
"=",
"\"{0.appid}-{0.name}\"",
".",
"format",
"(",
"self",
")",
"return",
"name"
] | The URL-friendly identifier for the item. Generates its own approximation if one isn't available | [
"The",
"URL",
"-",
"friendly",
"identifier",
"for",
"the",
"item",
".",
"Generates",
"its",
"own",
"approximation",
"if",
"one",
"isn",
"t",
"available"
] | 2e9ced4e7a6dbe3e09d5a648450bafc12b937b95 | https://github.com/Lagg/steamodd/blob/2e9ced4e7a6dbe3e09d5a648450bafc12b937b95/steam/sim.py#L260-L267 | train |
Lagg/steamodd | steam/sim.py | item.quality | def quality(self):
""" Can't really trust presence of a schema here, but there is an ID sometimes """
try:
qid = int((self.tool_metadata or {}).get("quality", 0))
except:
qid = 0
# We might be able to get the quality strings from the item's tags
internal_name, name = "normal", "Normal"
if self.tags:
tags = {x.get('category'): x for x in self.tags}
if 'Quality' in tags:
internal_name, name = tags['Quality'].get('internal_name'), tags['Quality'].get('name')
return qid, internal_name, name | python | def quality(self):
""" Can't really trust presence of a schema here, but there is an ID sometimes """
try:
qid = int((self.tool_metadata or {}).get("quality", 0))
except:
qid = 0
# We might be able to get the quality strings from the item's tags
internal_name, name = "normal", "Normal"
if self.tags:
tags = {x.get('category'): x for x in self.tags}
if 'Quality' in tags:
internal_name, name = tags['Quality'].get('internal_name'), tags['Quality'].get('name')
return qid, internal_name, name | [
"def",
"quality",
"(",
"self",
")",
":",
"try",
":",
"qid",
"=",
"int",
"(",
"(",
"self",
".",
"tool_metadata",
"or",
"{",
"}",
")",
".",
"get",
"(",
"\"quality\"",
",",
"0",
")",
")",
"except",
":",
"qid",
"=",
"0",
"# We might be able to get the qu... | Can't really trust presence of a schema here, but there is an ID sometimes | [
"Can",
"t",
"really",
"trust",
"presence",
"of",
"a",
"schema",
"here",
"but",
"there",
"is",
"an",
"ID",
"sometimes"
] | 2e9ced4e7a6dbe3e09d5a648450bafc12b937b95 | https://github.com/Lagg/steamodd/blob/2e9ced4e7a6dbe3e09d5a648450bafc12b937b95/steam/sim.py#L292-L306 | train |
Lagg/steamodd | steam/api.py | key.get | def get(cls):
"""Get the current API key.
if one has not been given via 'set' the env var STEAMODD_API_KEY will
be checked instead.
"""
apikey = cls.__api_key or cls.__api_key_env_var
if apikey:
return apikey
else:
raise APIKeyMissingError("API key not set") | python | def get(cls):
"""Get the current API key.
if one has not been given via 'set' the env var STEAMODD_API_KEY will
be checked instead.
"""
apikey = cls.__api_key or cls.__api_key_env_var
if apikey:
return apikey
else:
raise APIKeyMissingError("API key not set") | [
"def",
"get",
"(",
"cls",
")",
":",
"apikey",
"=",
"cls",
".",
"__api_key",
"or",
"cls",
".",
"__api_key_env_var",
"if",
"apikey",
":",
"return",
"apikey",
"else",
":",
"raise",
"APIKeyMissingError",
"(",
"\"API key not set\"",
")"
] | Get the current API key.
if one has not been given via 'set' the env var STEAMODD_API_KEY will
be checked instead. | [
"Get",
"the",
"current",
"API",
"key",
".",
"if",
"one",
"has",
"not",
"been",
"given",
"via",
"set",
"the",
"env",
"var",
"STEAMODD_API_KEY",
"will",
"be",
"checked",
"instead",
"."
] | 2e9ced4e7a6dbe3e09d5a648450bafc12b937b95 | https://github.com/Lagg/steamodd/blob/2e9ced4e7a6dbe3e09d5a648450bafc12b937b95/steam/api.py#L77-L87 | train |
Lagg/steamodd | steam/api.py | method_result.call | def call(self):
""" Make the API call again and fetch fresh data. """
data = self._downloader.download()
# Only try to pass errors arg if supported
if sys.version >= "2.7":
data = data.decode("utf-8", errors="ignore")
else:
data = data.decode("utf-8")
self.update(json.loads(data))
self._fetched = True | python | def call(self):
""" Make the API call again and fetch fresh data. """
data = self._downloader.download()
# Only try to pass errors arg if supported
if sys.version >= "2.7":
data = data.decode("utf-8", errors="ignore")
else:
data = data.decode("utf-8")
self.update(json.loads(data))
self._fetched = True | [
"def",
"call",
"(",
"self",
")",
":",
"data",
"=",
"self",
".",
"_downloader",
".",
"download",
"(",
")",
"# Only try to pass errors arg if supported",
"if",
"sys",
".",
"version",
">=",
"\"2.7\"",
":",
"data",
"=",
"data",
".",
"decode",
"(",
"\"utf-8\"",
... | Make the API call again and fetch fresh data. | [
"Make",
"the",
"API",
"call",
"again",
"and",
"fetch",
"fresh",
"data",
"."
] | 2e9ced4e7a6dbe3e09d5a648450bafc12b937b95 | https://github.com/Lagg/steamodd/blob/2e9ced4e7a6dbe3e09d5a648450bafc12b937b95/steam/api.py#L248-L259 | train |
Lagg/steamodd | steam/items.py | schema._attribute_definition | def _attribute_definition(self, attrid):
""" Returns the attribute definition dict of a given attribute
ID, can be the name or the integer ID """
attrs = self._schema["attributes"]
try:
# Make a new dict to avoid side effects
return dict(attrs[attrid])
except KeyError:
attr_names = self._schema["attribute_names"]
attrdef = attrs.get(attr_names.get(str(attrid).lower()))
if not attrdef:
return None
else:
return dict(attrdef) | python | def _attribute_definition(self, attrid):
""" Returns the attribute definition dict of a given attribute
ID, can be the name or the integer ID """
attrs = self._schema["attributes"]
try:
# Make a new dict to avoid side effects
return dict(attrs[attrid])
except KeyError:
attr_names = self._schema["attribute_names"]
attrdef = attrs.get(attr_names.get(str(attrid).lower()))
if not attrdef:
return None
else:
return dict(attrdef) | [
"def",
"_attribute_definition",
"(",
"self",
",",
"attrid",
")",
":",
"attrs",
"=",
"self",
".",
"_schema",
"[",
"\"attributes\"",
"]",
"try",
":",
"# Make a new dict to avoid side effects",
"return",
"dict",
"(",
"attrs",
"[",
"attrid",
"]",
")",
"except",
"K... | Returns the attribute definition dict of a given attribute
ID, can be the name or the integer ID | [
"Returns",
"the",
"attribute",
"definition",
"dict",
"of",
"a",
"given",
"attribute",
"ID",
"can",
"be",
"the",
"name",
"or",
"the",
"integer",
"ID"
] | 2e9ced4e7a6dbe3e09d5a648450bafc12b937b95 | https://github.com/Lagg/steamodd/blob/2e9ced4e7a6dbe3e09d5a648450bafc12b937b95/steam/items.py#L130-L145 | train |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.