repo
stringlengths
7
54
path
stringlengths
4
192
url
stringlengths
87
284
code
stringlengths
78
104k
code_tokens
list
docstring
stringlengths
1
46.9k
docstring_tokens
list
language
stringclasses
1 value
partition
stringclasses
3 values
mozilla/crontabber
crontabber/mixins.py
https://github.com/mozilla/crontabber/blob/b510be349e71f165c1a9506db95bda0b88728f8b/crontabber/mixins.py#L32-L114
def with_transactional_resource(
    transactional_resource_class,
    resource_name,
    reference_value_from=None
):
    """A class decorator for Crontabber Apps.

    This decorator will give access to a resource connection source.
    Configuration will be automatically set up and the cron app can expect
    to have attributes:

        self.{resource_name}_connection_factory
        self.{resource_name}_transaction_executor

    available to use.  Within the setup, the RequiredConfig structure gets
    set up like this:

        config.{resource_name}.{resource_name}_class = \
            transactional_resource_class
        config.{resource_name}.{resource_name}_transaction_executor_class = \
            'crontabber.transaction_executor.TransactionExecutor'

    parameters:
        transactional_resource_class - a string representing the full
            path of the class that represents a connection to the resource.
            An example is "crontabber.connection_factory.ConnectionFactory".
        resource_name - a string that will serve as an identifier for this
            resource within the mixin. For example, if the resource is
            'database' we'll see configman namespace in the cron job section
            of "...class-SomeCronJob.database.database_connection_class" and
            "...class-SomeCronJob.database.transaction_executor_class"
        reference_value_from - passed through to configman's add_option so
            the option value can be shared from a common namespace
    """
    def class_decorator(cls):
        # The decorated app must expose get_required_config(); enforce that
        # early with a clear message rather than failing later.
        if not issubclass(cls, RequiredConfig):
            raise Exception(
                '%s must have RequiredConfig as a base class' % cls
            )
        new_req = cls.get_required_config()
        new_req.namespace(resource_name)
        # Option "<name>_class": the connection-factory class itself.
        new_req[resource_name].add_option(
            '%s_class' % resource_name,
            default=transactional_resource_class,
            from_string_converter=class_converter,
            reference_value_from=reference_value_from,
        )
        # Option "<name>_transaction_executor_class": wraps the factory in
        # a transaction executor.
        new_req[resource_name].add_option(
            '%s_transaction_executor_class' % resource_name,
            default='crontabber.transaction_executor.TransactionExecutor',
            doc='a class that will execute transactions',
            from_string_converter=class_converter,
            reference_value_from=reference_value_from
        )
        cls.required_config = new_req

        #------------------------------------------------------------------
        def new__init__(self, *args, **kwargs):
            # instantiate the connection class for the resource
            super(cls, self).__init__(*args, **kwargs)
            setattr(
                self,
                "%s_connection_factory" % resource_name,
                self.config[resource_name]['%s_class' % resource_name](
                    self.config[resource_name]
                )
            )
            # instantiate a transaction executor bound to the
            # resource connection
            setattr(
                self,
                "%s_transaction_executor" % resource_name,
                self.config[resource_name][
                    '%s_transaction_executor_class' % resource_name
                ](
                    self.config[resource_name],
                    getattr(self, "%s_connection_factory" % resource_name)
                )
            )
        # NOTE(review): every class has an (inherited) __init__, so the
        # hasattr check below appears to always be true and the else branch
        # unreachable -- confirm before removing.
        if hasattr(cls, '__init__'):
            original_init = cls.__init__

            def both_inits(self, *args, **kwargs):
                # Run the resource setup first, then the class's own
                # original __init__.
                new__init__(self, *args, **kwargs)
                return original_init(self, *args, **kwargs)
            cls.__init__ = both_inits
        else:
            cls.__init__ = new__init__
        return cls
    return class_decorator
[ "def", "with_transactional_resource", "(", "transactional_resource_class", ",", "resource_name", ",", "reference_value_from", "=", "None", ")", ":", "def", "class_decorator", "(", "cls", ")", ":", "if", "not", "issubclass", "(", "cls", ",", "RequiredConfig", ")", ...
a class decorator for Crontabber Apps. This decorator will give access to a resource connection source. Configuration will be automatically set up and the cron app can expect to have attributes: self.{resource_name}_connection_factory self.{resource_name}_transaction_executor available to use. Within the setup, the RequiredConfig structure gets set up like this: config.{resource_name}.{resource_name}_class = \ transactional_resource_class config.{resource_name}.{resource_name}_transaction_executor_class = \ 'crontabber.transaction_executor.TransactionExecutor' parameters: transactional_resource_class - a string representing the full path of the class that represents a connection to the resource. An example is "crontabber.connection_factory.ConnectionFactory". resource_name - a string that will serve as an identifier for this resource within the mixin. For example, if the resource is 'database' we'll see configman namespace in the cron job section of "...class-SomeCronJob.database.database_connection_class" and "...class-SomeCronJob.database.transaction_executor_class"
[ "a", "class", "decorator", "for", "Crontabber", "Apps", ".", "This", "decorator", "will", "give", "access", "to", "a", "resource", "connection", "source", ".", "Configuration", "will", "be", "automatically", "set", "up", "and", "the", "cron", "app", "can", "...
python
train
knipknap/exscript
Exscript/account.py
https://github.com/knipknap/exscript/blob/72718eee3e87b345d5a5255be9824e867e42927b/Exscript/account.py#L560-L573
def release_accounts(self, owner):
    """
    Releases all accounts that were acquired by the given owner.

    :type  owner: object
    :param owner: The owner descriptor as passed to acquire_account().
    """
    with self.unlock_cond:
        # Move every account held by this owner back to the unlocked pool.
        owned = self.owner2account[owner]
        for acct in owned:
            self.account2owner.pop(acct)
            acct.release(False)
            self.unlocked_accounts.append(acct)
        # Drop the owner's bookkeeping entry and wake any waiters.
        self.owner2account.pop(owner)
        self.unlock_cond.notify_all()
[ "def", "release_accounts", "(", "self", ",", "owner", ")", ":", "with", "self", ".", "unlock_cond", ":", "for", "account", "in", "self", ".", "owner2account", "[", "owner", "]", ":", "self", ".", "account2owner", ".", "pop", "(", "account", ")", "account...
Releases all accounts that were acquired by the given owner. :type owner: object :param owner: The owner descriptor as passed to acquire_account().
[ "Releases", "all", "accounts", "that", "were", "acquired", "by", "the", "given", "owner", "." ]
python
train
tchellomello/python-arlo
pyarlo/camera.py
https://github.com/tchellomello/python-arlo/blob/db70aeb81705309c56ad32bbab1094f6cd146524/pyarlo/camera.py#L171-L175
def make_video_cache(self, days=None):
    """Save videos on _cache_videos to avoid dups."""
    # Fall back to the configured minimum window when no span is given.
    span = self._min_days_vdo_cache if days is None else days
    self._cached_videos = self.videos(span)
[ "def", "make_video_cache", "(", "self", ",", "days", "=", "None", ")", ":", "if", "days", "is", "None", ":", "days", "=", "self", ".", "_min_days_vdo_cache", "self", ".", "_cached_videos", "=", "self", ".", "videos", "(", "days", ")" ]
Save videos on _cache_videos to avoid dups.
[ "Save", "videos", "on", "_cache_videos", "to", "avoid", "dups", "." ]
python
train
kata198/AdvancedHTMLParser
AdvancedHTMLParser/Parser.py
https://github.com/kata198/AdvancedHTMLParser/blob/06aeea5d8e2ea86e155aae0fc237623d3e9b7f9d/AdvancedHTMLParser/Parser.py#L407-L423
def getElementsWithAttrValues(self, attrName, attrValues, root='root'):
    '''
    getElementsWithAttrValues - Returns elements which have an attribute,
      named by #attrName, whose value is one of the values in #attrValues.

    @param attrName <lowercase str> - A lowercase attribute name

    @param attrValues set<str> - A set of all valid values.

    @return - TagCollection of all matching elements
    '''
    (root, isFromRoot) = self._handleRootArg(root)

    # Normalize to a set for O(1) membership tests downstream.  Using
    # isinstance (instead of an exact type() comparison) also accepts set
    # subclasses without a needless copy.
    if not isinstance(attrValues, set):
        attrValues = set(attrValues)

    return root.getElementsWithAttrValues(attrName, attrValues)
[ "def", "getElementsWithAttrValues", "(", "self", ",", "attrName", ",", "attrValues", ",", "root", "=", "'root'", ")", ":", "(", "root", ",", "isFromRoot", ")", "=", "self", ".", "_handleRootArg", "(", "root", ")", "if", "type", "(", "attrValues", ")", "!...
getElementsWithAttrValues - Returns elements with an attribute, named by #attrName contains one of the values in the list, #values @param attrName <lowercase str> - A lowercase attribute name @param attrValues set<str> - A set of all valid values. @return - TagCollection of all matching elements
[ "getElementsWithAttrValues", "-", "Returns", "elements", "with", "an", "attribute", "named", "by", "#attrName", "contains", "one", "of", "the", "values", "in", "the", "list", "#values" ]
python
train
GeospatialPython/pyshp
shapefile.py
https://github.com/GeospatialPython/pyshp/blob/71231ddc5aa54f155d4f0563c56006fffbfc84e7/shapefile.py#L1673-L1678
def linem(self, lines):
    """Creates a POLYLINEM shape. Lines is a collection of lines, each
    made up of a list of xym values. If the m (measure) value is not
    included, it defaults to None (NoData)."""
    # Delegate to the shared multi-part writer with the POLYLINEM type.
    self._shapeparts(parts=lines, shapeType=POLYLINEM)
[ "def", "linem", "(", "self", ",", "lines", ")", ":", "shapeType", "=", "POLYLINEM", "self", ".", "_shapeparts", "(", "parts", "=", "lines", ",", "shapeType", "=", "shapeType", ")" ]
Creates a POLYLINEM shape. Lines is a collection of lines, each made up of a list of xym values. If the m (measure) value is not included, it defaults to None (NoData).
[ "Creates", "a", "POLYLINEM", "shape", ".", "Lines", "is", "a", "collection", "of", "lines", "each", "made", "up", "of", "a", "list", "of", "xym", "values", ".", "If", "the", "m", "(", "measure", ")", "value", "is", "not", "included", "it", "defaults", ...
python
train
blockstack/blockstack-core
blockstack/lib/atlas.py
https://github.com/blockstack/blockstack-core/blob/1dcfdd39b152d29ce13e736a6a1a0981401a0505/blockstack/lib/atlas.py#L2725-L2749
def get_neighbors( self, peer_hostport, con=None, path=None, peer_table=None ):
    """
    Get neighbors of this peer
    NOTE: don't lock peer table in production
    """
    if path is None:
        path = self.atlasdb_path

    # Lazily resolve the RPC timeout the first time it is needed.
    if self.neighbors_timeout is None:
        self.neighbors_timeout = atlas_neighbors_timeout()

    peer_hostport = self.canonical_peer(peer_hostport)

    if peer_hostport == self.my_hostport:
        # Asking about ourselves: read straight from the local peer table.
        neighbors = atlas_get_live_neighbors(None, peer_table=peer_table)
    else:
        # Remote peer: issue an RPC with the configured timeout.
        neighbors = atlas_peer_get_neighbors(
            self.my_hostport, peer_hostport, timeout=self.neighbors_timeout,
            peer_table=peer_table, path=path, con=con)

    if neighbors is None:
        log.error("%s: failed to ask %s for neighbors" % (self.my_hostport, peer_hostport))
    else:
        log.debug("%s: neighbors of %s are (%s): %s" % (self.my_hostport, peer_hostport, len(neighbors), ",".join(neighbors)))

    return neighbors
[ "def", "get_neighbors", "(", "self", ",", "peer_hostport", ",", "con", "=", "None", ",", "path", "=", "None", ",", "peer_table", "=", "None", ")", ":", "if", "path", "is", "None", ":", "path", "=", "self", ".", "atlasdb_path", "if", "self", ".", "nei...
Get neighbors of this peer NOTE: don't lock peer table in production
[ "Get", "neighbors", "of", "this", "peer", "NOTE", ":", "don", "t", "lock", "peer", "table", "in", "production" ]
python
train
inasafe/inasafe
safe/plugin.py
https://github.com/inasafe/inasafe/blob/831d60abba919f6d481dc94a8d988cc205130724/safe/plugin.py#L296-L308
def _create_batch_runner_action(self):
    """Create action for batch runner dialog."""
    # Build the toolbar/menu action with its icon and tooltips.
    icon_path = resources_path('img', 'icons', 'show-batch-runner.svg')
    action = QAction(
        QIcon(icon_path),
        self.tr('Batch Runner'),
        self.iface.mainWindow())
    action.setStatusTip(self.tr('Open Batch Runner'))
    action.setWhatsThis(self.tr('Open Batch Runner'))
    action.triggered.connect(self.show_batch_runner)
    self.action_batch_runner = action
    self.add_action(action, add_to_toolbar=self.full_toolbar)
[ "def", "_create_batch_runner_action", "(", "self", ")", ":", "icon", "=", "resources_path", "(", "'img'", ",", "'icons'", ",", "'show-batch-runner.svg'", ")", "self", ".", "action_batch_runner", "=", "QAction", "(", "QIcon", "(", "icon", ")", ",", "self", ".",...
Create action for batch runner dialog.
[ "Create", "action", "for", "batch", "runner", "dialog", "." ]
python
train
Clinical-Genomics/scout
scout/server/blueprints/variants/controllers.py
https://github.com/Clinical-Genomics/scout/blob/90a551e2e1653a319e654c2405c2866f93d0ebb9/scout/server/blueprints/variants/controllers.py#L1128-L1133
def variant_acmg(store, institute_id, case_name, variant_id):
    """Collect data relevant for rendering ACMG classification form."""
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    # Assemble the template context for the classification form.
    return {
        'institute': institute_obj,
        'case': case_obj,
        'variant': store.variant(variant_id),
        'CRITERIA': ACMG_CRITERIA,
        'ACMG_OPTIONS': ACMG_OPTIONS,
    }
[ "def", "variant_acmg", "(", "store", ",", "institute_id", ",", "case_name", ",", "variant_id", ")", ":", "institute_obj", ",", "case_obj", "=", "institute_and_case", "(", "store", ",", "institute_id", ",", "case_name", ")", "variant_obj", "=", "store", ".", "v...
Collect data relevant for rendering ACMG classification form.
[ "Collect", "data", "relevant", "for", "rendering", "ACMG", "classification", "form", "." ]
python
test
spencerahill/aospy
aospy/utils/times.py
https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L121-L145
def monthly_mean_at_each_ind(monthly_means, sub_monthly_timeseries):
    """Copy monthly mean over each time index in that month.

    Parameters
    ----------
    monthly_means : xarray.DataArray
        array of monthly means
    sub_monthly_timeseries : xarray.DataArray
        array of a timeseries at sub-monthly time resolution

    Returns
    -------
    xarray.DataArray with each monthly mean value from `monthly_means`
    repeated at each time within that month from `sub_monthly_timeseries`

    See Also
    --------
    monthly_mean_ts : Create timeseries of monthly mean values
    """
    time = monthly_means[TIME_STR]
    # Anchor the range at the first instant of the first month.
    start = time.indexes[TIME_STR][0].replace(day=1, hour=0)
    end = time.indexes[TIME_STR][-1]
    # pd.date_range replaces the DatetimeIndex(start=..., end=..., freq=...)
    # keyword constructor, which was removed in pandas 1.0.
    new_indices = pd.date_range(start=start, end=end, freq='MS')
    arr_new = monthly_means.reindex(time=new_indices, method='backfill')
    return arr_new.reindex_like(sub_monthly_timeseries, method='pad')
[ "def", "monthly_mean_at_each_ind", "(", "monthly_means", ",", "sub_monthly_timeseries", ")", ":", "time", "=", "monthly_means", "[", "TIME_STR", "]", "start", "=", "time", ".", "indexes", "[", "TIME_STR", "]", "[", "0", "]", ".", "replace", "(", "day", "=", ...
Copy monthly mean over each time index in that month. Parameters ---------- monthly_means : xarray.DataArray array of monthly means sub_monthly_timeseries : xarray.DataArray array of a timeseries at sub-monthly time resolution Returns ------- xarray.DataArray with eath monthly mean value from `monthly_means` repeated at each time within that month from `sub_monthly_timeseries` See Also -------- monthly_mean_ts : Create timeseries of monthly mean values
[ "Copy", "monthly", "mean", "over", "each", "time", "index", "in", "that", "month", "." ]
python
train
dask/dask-kubernetes
dask_kubernetes/objects.py
https://github.com/dask/dask-kubernetes/blob/8a4883ecd902460b446bb1f43ed97efe398a135e/dask_kubernetes/objects.py#L96-L184
def make_pod_spec(
        image,
        labels=None,
        threads_per_worker=1,
        env=None,
        extra_container_config=None,
        extra_pod_config=None,
        memory_limit=None,
        memory_request=None,
        cpu_limit=None,
        cpu_request=None,
):
    """ Create generic pod template from input parameters

    The dict-valued parameters default to ``None`` instead of ``{}`` to
    avoid Python's shared-mutable-default pitfall; passing ``{}`` behaves
    identically.

    Examples
    --------
    >>> make_pod_spec(image='daskdev/dask:latest', memory_limit='4G', memory_request='4G')
    """
    # Fresh dicts per call -- a literal `{}` default would be one dict
    # shared (and mutable) across every invocation.
    labels = {} if labels is None else labels
    env = {} if env is None else env
    extra_container_config = (
        {} if extra_container_config is None else extra_container_config
    )
    extra_pod_config = {} if extra_pod_config is None else extra_pod_config

    args = [
        'dask-worker',
        '$(DASK_SCHEDULER_ADDRESS)',
        '--nthreads', str(threads_per_worker),
        '--death-timeout', '60',
    ]
    if memory_limit:
        args.extend(['--memory-limit', str(memory_limit)])
    pod = client.V1Pod(
        metadata=client.V1ObjectMeta(
            labels=labels
        ),
        spec=client.V1PodSpec(
            restart_policy='Never',
            containers=[
                client.V1Container(
                    name='dask-worker',
                    image=image,
                    args=args,
                    env=[client.V1EnvVar(name=k, value=v)
                         for k, v in env.items()],
                )
            ],
            tolerations=[
                client.V1Toleration(
                    key='k8s.dask.org/dedicated',
                    operator='Equal',
                    value='worker',
                    effect='NoSchedule',
                ),
                # GKE currently does not permit creating taints on a node pool
                # with a `/` in the key field
                client.V1Toleration(
                    key='k8s.dask.org_dedicated',
                    operator='Equal',
                    value='worker',
                    effect='NoSchedule',
                ),
            ]
        )
    )

    # Only attach the resource requests/limits actually provided.
    resources = client.V1ResourceRequirements(limits={}, requests={})
    if cpu_request:
        resources.requests['cpu'] = cpu_request
    if memory_request:
        resources.requests['memory'] = memory_request
    if cpu_limit:
        resources.limits['cpu'] = cpu_limit
    if memory_limit:
        resources.limits['memory'] = memory_limit
    pod.spec.containers[0].resources = resources

    # Apply arbitrary overrides last so callers can tweak anything above.
    for key, value in extra_container_config.items():
        _set_k8s_attribute(
            pod.spec.containers[0],
            key,
            value
        )
    for key, value in extra_pod_config.items():
        _set_k8s_attribute(
            pod.spec,
            key,
            value
        )
    return pod
[ "def", "make_pod_spec", "(", "image", ",", "labels", "=", "{", "}", ",", "threads_per_worker", "=", "1", ",", "env", "=", "{", "}", ",", "extra_container_config", "=", "{", "}", ",", "extra_pod_config", "=", "{", "}", ",", "memory_limit", "=", "None", ...
Create generic pod template from input parameters Examples -------- >>> make_pod_spec(image='daskdev/dask:latest', memory_limit='4G', memory_request='4G')
[ "Create", "generic", "pod", "template", "from", "input", "parameters" ]
python
train
gabstopper/smc-python
smc/actions/_search.py
https://github.com/gabstopper/smc-python/blob/e027b8a5dcfaf884eada32d113d41c1e56b32457/smc/actions/_search.py#L279-L300
def all_elements_by_type(name):
    """
    Get specified elements based on the entry point verb from SMC api
    To get the entry points available, you can get these from the session::

        session.cache.entry_points

    Execution will get the entry point for the element type, then get all
    elements that match.

    For example::

        search.all_elements_by_type('host')

    :param name: top level entry point name
    :raises: `smc.api.exceptions.UnsupportedEntryPoint`
    :return: list with json representation of name match, else None
    """
    # Guard clauses: no name or no resolvable entry point -> None.
    if not name:
        return None
    entry = element_entry_point(name)
    if not entry:  # in case an invalid entry point is specified
        return None
    return element_by_href_as_json(entry)
[ "def", "all_elements_by_type", "(", "name", ")", ":", "if", "name", ":", "entry", "=", "element_entry_point", "(", "name", ")", "if", "entry", ":", "# in case an invalid entry point is specified", "result", "=", "element_by_href_as_json", "(", "entry", ")", "return"...
Get specified elements based on the entry point verb from SMC api To get the entry points available, you can get these from the session:: session.cache.entry_points Execution will get the entry point for the element type, then get all elements that match. For example:: search.all_elements_by_type('host') :param name: top level entry point name :raises: `smc.api.exceptions.UnsupportedEntryPoint` :return: list with json representation of name match, else None
[ "Get", "specified", "elements", "based", "on", "the", "entry", "point", "verb", "from", "SMC", "api", "To", "get", "the", "entry", "points", "available", "you", "can", "get", "these", "from", "the", "session", "::" ]
python
train
f3at/feat
src/feat/models/getter.py
https://github.com/f3at/feat/blob/15da93fc9d6ec8154f52a9172824e25821195ef8/src/feat/models/getter.py#L250-L262
def value_attr(attr_name):
    """
    Creates a getter that will retrieve value's attribute
    with specified name.

    @param attr_name: the name of an attribute belonging to the value.
    @type attr_name: str
    """
    def value_attr(value, context, **_params):
        # Look the attribute up on the value and wrap it for the model layer.
        return _attr(getattr(value, attr_name))
    return value_attr
[ "def", "value_attr", "(", "attr_name", ")", ":", "def", "value_attr", "(", "value", ",", "context", ",", "*", "*", "_params", ")", ":", "value", "=", "getattr", "(", "value", ",", "attr_name", ")", "return", "_attr", "(", "value", ")", "return", "value...
Creates a getter that will retrieve value's attribute with specified name. @param attr_name: the name of an attribute belonging to the value. @type attr_name: str
[ "Creates", "a", "getter", "that", "will", "retrieve", "value", "s", "attribute", "with", "specified", "name", "." ]
python
train
Bonsanto/polygon-geohasher
polygon_geohasher/polygon_geohasher.py
https://github.com/Bonsanto/polygon-geohasher/blob/63f27f41ea3e9d8fda7872d86217719286037c11/polygon_geohasher/polygon_geohasher.py#L8-L20
def geohash_to_polygon(geo):
    """
    :param geo: String that represents the geohash.

    :return: Returns a Shapely's Polygon instance that represents the geohash.
    """
    lat, lng, dlat, dlng = geohash.decode_exactly(geo)
    # The geohash decodes to a centroid plus half-extents; the four corners
    # are written as (lng, lat) pairs, the order Shapely expects.
    corners = [
        (lng - dlng, lat - dlat),
        (lng + dlng, lat - dlat),
        (lng + dlng, lat + dlat),
        (lng - dlng, lat + dlat),
    ]
    # Repeat the first corner to close the ring.
    return geometry.Polygon(corners + corners[:1])
[ "def", "geohash_to_polygon", "(", "geo", ")", ":", "lat_centroid", ",", "lng_centroid", ",", "lat_offset", ",", "lng_offset", "=", "geohash", ".", "decode_exactly", "(", "geo", ")", "corner_1", "=", "(", "lat_centroid", "-", "lat_offset", ",", "lng_centroid", ...
:param geo: String that represents the geohash. :return: Returns a Shapely's Polygon instance that represents the geohash.
[ ":", "param", "geo", ":", "String", "that", "represents", "the", "geohash", ".", ":", "return", ":", "Returns", "a", "Shapely", "s", "Polygon", "instance", "that", "represents", "the", "geohash", "." ]
python
train
h2non/pook
pook/mock.py
https://github.com/h2non/pook/blob/e64094e41e4d89d98d2d29af7608ef27dc50cf19/pook/mock.py#L697-L752
def match(self, request):
    """
    Matches an outgoing HTTP request against the current mock matchers.

    This method acts like a delegator to `pook.MatcherEngine`.

    Arguments:
        request (pook.Request): request instance to match.

    Raises:
        Exception: if the mock has an exception defined.

    Returns:
        tuple(bool, list[Exception]): ``True`` if the mock matches
        the outgoing HTTP request, otherwise ``False``. Also returns
        an optional list of error exceptions.
    """
    # An exhausted mock can no longer match anything.
    if self._times <= 0:
        raise PookExpiredMock('Mock expired')

    # Any filter vetoing the request means "no match", without errors.
    if not all(test(request, self) for test in self.filters):
        return False, []

    # Let each mapper transform the request before matching.
    for mapper in self.mappers:
        request = mapper(request, self)
        if not request:
            raise ValueError('map function must return a request object')

    # Delegate the actual comparison to the matcher engine.
    matches, errors = self.matchers.match(request)
    if not matches:
        return False, errors

    # Record the hit for later inspection and update counters.
    self._calls.append(request)
    self._matches += 1
    if not self._persist:
        self._times -= 1

    # A simulated error takes precedence over callbacks.
    if self._error:
        raise self._error

    for callback in self.callbacks:
        callback(request, self)

    return True, []
[ "def", "match", "(", "self", ",", "request", ")", ":", "# If mock already expired, fail it", "if", "self", ".", "_times", "<=", "0", ":", "raise", "PookExpiredMock", "(", "'Mock expired'", ")", "# Trigger mock filters", "for", "test", "in", "self", ".", "filters...
Matches an outgoing HTTP request against the current mock matchers. This method acts like a delegator to `pook.MatcherEngine`. Arguments: request (pook.Request): request instance to match. Raises: Exception: if the mock has an exception defined. Returns: tuple(bool, list[Exception]): ``True`` if the mock matches the outgoing HTTP request, otherwise ``False``. Also returns an optional list of error exceptions.
[ "Matches", "an", "outgoing", "HTTP", "request", "against", "the", "current", "mock", "matchers", "." ]
python
test
mzucker/noteshrink
noteshrink.py
https://github.com/mzucker/noteshrink/blob/7d876e5b43923c6bf8d64b7ef18f6855bfb30ce3/noteshrink.py#L431-L456
def save(output_filename, labels, palette, dpi, options):
    '''Save the label/palette pair out as an indexed PNG image.

    This optionally saturates the pallete by mapping the smallest color
    component to zero and the largest one to 255, and also optionally
    sets the background color to pure white.
    '''
    if not options.quiet:
        print(' saving {}...'.format(output_filename))

    if options.saturate:
        # Stretch the channels so palette values span the full 0-255 range.
        fpal = palette.astype(np.float32)
        lo = fpal.min()
        hi = fpal.max()
        palette = (255 * (fpal - lo) / (hi - lo)).astype(np.uint8)

    if options.white_bg:
        # Copy before mutating so the caller's palette stays untouched.
        palette = palette.copy()
        palette[0] = (255, 255, 255)

    output_img = Image.fromarray(labels, 'P')
    output_img.putpalette(palette.flatten())
    output_img.save(output_filename, dpi=dpi)
[ "def", "save", "(", "output_filename", ",", "labels", ",", "palette", ",", "dpi", ",", "options", ")", ":", "if", "not", "options", ".", "quiet", ":", "print", "(", "' saving {}...'", ".", "format", "(", "output_filename", ")", ")", "if", "options", "."...
Save the label/palette pair out as an indexed PNG image. This optionally saturates the pallete by mapping the smallest color component to zero and the largest one to 255, and also optionally sets the background color to pure white.
[ "Save", "the", "label", "/", "palette", "pair", "out", "as", "an", "indexed", "PNG", "image", ".", "This", "optionally", "saturates", "the", "pallete", "by", "mapping", "the", "smallest", "color", "component", "to", "zero", "and", "the", "largest", "one", ...
python
train
alpha-xone/xbbg
xbbg/blp.py
https://github.com/alpha-xone/xbbg/blob/70226eb19a72a08144b5d8cea9db4913200f7bc5/xbbg/blp.py#L439-L512
def dividend(
        tickers, typ='all', start_date=None, end_date=None, **kwargs
) -> pd.DataFrame:
    """
    Bloomberg dividend / split history

    Args:
        tickers: list of tickers
        typ: dividend adjustment type
            `all`: `DVD_Hist_All`
            `dvd`: `DVD_Hist`
            `split`: `Eqy_DVD_Hist_Splits`
            `gross`: `Eqy_DVD_Hist_Gross`
            `adjust`: `Eqy_DVD_Adjust_Fact`
            `adj_fund`: `Eqy_DVD_Adj_Fund`
            `with_amt`: `DVD_Hist_All_with_Amt_Status`
            `dvd_amt`: `DVD_Hist_with_Amt_Status`
            `gross_amt`: `DVD_Hist_Gross_with_Amt_Stat`
            `projected`: `BDVD_Pr_Ex_Dts_DVD_Amts_w_Ann`
        start_date: start date
        end_date: end date
        **kwargs: overrides passed through to the underlying `bds` call

    Returns:
        pd.DataFrame

    Examples:
        >>> res = dividend(
        ...     tickers=['C US Equity', 'NVDA US Equity', 'MS US Equity'],
        ...     start_date='2018-01-01', end_date='2018-05-01'
        ... )
        >>> res.index.name = None
        >>> res.loc[:, ['ex_date', 'rec_date', 'dvd_amt']].round(2)
                           ex_date   rec_date  dvd_amt
        C US Equity     2018-02-02 2018-02-05     0.32
        MS US Equity    2018-04-27 2018-04-30     0.25
        MS US Equity    2018-01-30 2018-01-31     0.25
        NVDA US Equity  2018-02-22 2018-02-23     0.15
    """
    # Accept a single ticker given as a bare string.
    if isinstance(tickers, str): tickers = [tickers]
    # Keep only plain equity tickers (no composite '=' tickers).
    tickers = [t for t in tickers if ('Equity' in t) and ('=' not in t)]

    # Map the short `typ` alias to the Bloomberg field mnemonic; unknown
    # values fall through unchanged so a raw field name can be passed.
    fld = {
        'all': 'DVD_Hist_All',
        'dvd': 'DVD_Hist',
        'split': 'Eqy_DVD_Hist_Splits',
        'gross': 'Eqy_DVD_Hist_Gross',
        'adjust': 'Eqy_DVD_Adjust_Fact',
        'adj_fund': 'Eqy_DVD_Adj_Fund',
        'with_amt': 'DVD_Hist_All_with_Amt_Status',
        'dvd_amt': 'DVD_Hist_with_Amt_Status',
        'gross_amt': 'DVD_Hist_Gross_with_Amt_Stat',
        'projected': 'BDVD_Pr_Ex_Dts_DVD_Amts_w_Ann',
    }.get(typ, typ)

    # Adjustment-factor queries default to cash / capital-change actions.
    if (fld == 'Eqy_DVD_Adjust_Fact') and ('Corporate_Actions_Filter' not in kwargs):
        kwargs['Corporate_Actions_Filter'] = 'NORMAL_CASH|ABNORMAL_CASH|CAPITAL_CHANGE'

    # Only the history-style fields accept start / end date overrides.
    if fld in [
        'DVD_Hist_All', 'DVD_Hist', 'Eqy_DVD_Hist_Gross',
        'DVD_Hist_All_with_Amt_Status', 'DVD_Hist_with_Amt_Status',
    ]:
        if start_date: kwargs['DVD_Start_Dt'] = utils.fmt_dt(start_date, fmt='%Y%m%d')
        if end_date: kwargs['DVD_End_Dt'] = utils.fmt_dt(end_date, fmt='%Y%m%d')

    # Rename Bloomberg's verbose column headers to snake_case columns.
    kwargs['col_maps'] = {
        'Declared Date': 'dec_date',
        'Ex-Date': 'ex_date',
        'Record Date': 'rec_date',
        'Payable Date': 'pay_date',
        'Dividend Amount': 'dvd_amt',
        'Dividend Frequency': 'dvd_freq',
        'Dividend Type': 'dvd_type',
        'Amount Status': 'amt_status',
        'Adjustment Date': 'adj_date',
        'Adjustment Factor': 'adj_factor',
        'Adjustment Factor Operator Type': 'adj_op',
        'Adjustment Factor Flag': 'adj_flag',
        'Amount Per Share': 'amt_ps',
        'Projected/Confirmed': 'category',
    }

    return bds(tickers=tickers, flds=fld, raw=False, **kwargs)
[ "def", "dividend", "(", "tickers", ",", "typ", "=", "'all'", ",", "start_date", "=", "None", ",", "end_date", "=", "None", ",", "*", "*", "kwargs", ")", "->", "pd", ".", "DataFrame", ":", "if", "isinstance", "(", "tickers", ",", "str", ")", ":", "t...
Bloomberg dividend / split history Args: tickers: list of tickers typ: dividend adjustment type `all`: `DVD_Hist_All` `dvd`: `DVD_Hist` `split`: `Eqy_DVD_Hist_Splits` `gross`: `Eqy_DVD_Hist_Gross` `adjust`: `Eqy_DVD_Adjust_Fact` `adj_fund`: `Eqy_DVD_Adj_Fund` `with_amt`: `DVD_Hist_All_with_Amt_Status` `dvd_amt`: `DVD_Hist_with_Amt_Status` `gross_amt`: `DVD_Hist_Gross_with_Amt_Stat` `projected`: `BDVD_Pr_Ex_Dts_DVD_Amts_w_Ann` start_date: start date end_date: end date **kwargs: overrides Returns: pd.DataFrame Examples: >>> res = dividend( ... tickers=['C US Equity', 'NVDA US Equity', 'MS US Equity'], ... start_date='2018-01-01', end_date='2018-05-01' ... ) >>> res.index.name = None >>> res.loc[:, ['ex_date', 'rec_date', 'dvd_amt']].round(2) ex_date rec_date dvd_amt C US Equity 2018-02-02 2018-02-05 0.32 MS US Equity 2018-04-27 2018-04-30 0.25 MS US Equity 2018-01-30 2018-01-31 0.25 NVDA US Equity 2018-02-22 2018-02-23 0.15
[ "Bloomberg", "dividend", "/", "split", "history" ]
python
valid
apple/turicreate
deps/src/cmake-3.13.4/Source/cmConvertMSBuildXMLToJSON.py
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/deps/src/cmake-3.13.4/Source/cmConvertMSBuildXMLToJSON.py#L252-L270
def __convert_bool(node):
    """Converts an BoolProperty node to JSON format."""
    converted = __convert_node(node, default_value='true')

    # A 'ReverseSwitch' attribute means the property also has a paired
    # switch that flips the value to false; emit both variants.
    reverse_switch = __get_attribute(node, 'ReverseSwitch')
    if reverse_switch:
        reversed_copy = copy.deepcopy(converted)
        reversed_copy['switch'] = reverse_switch
        reversed_copy['value'] = 'false'
        return [reversed_copy, converted]

    # Modify flags when there is an argument child
    __with_argument(node, converted)
    return __check_for_flag(converted)
[ "def", "__convert_bool", "(", "node", ")", ":", "converted", "=", "__convert_node", "(", "node", ",", "default_value", "=", "'true'", ")", "# Check for a switch for reversing the value", "reverse_switch", "=", "__get_attribute", "(", "node", ",", "'ReverseSwitch'", ")...
Converts an BoolProperty node to JSON format.
[ "Converts", "an", "BoolProperty", "node", "to", "JSON", "format", "." ]
python
train
realestate-com-au/dashmat
dashmat/core_modules/splunk/splunk-sdk-1.3.0/splunklib/searchcommands/search_command_internals.py
https://github.com/realestate-com-au/dashmat/blob/433886e52698f0ddb9956f087b76041966c3bcd1/dashmat/core_modules/splunk/splunk-sdk-1.3.0/splunklib/searchcommands/search_command_internals.py#L183-L187
def append(self, message_level, message_text):
    """ Adds a message level/text pair to this MessagesHeader

    :param message_level: one of `MessagesHeader._message_levels`
    :param message_text: the message text
    :raises ValueError: if `message_level` is not a recognized level
    """
    # `x not in y` is the idiomatic (PEP 8) form of `not x in y`.
    if message_level not in MessagesHeader._message_levels:
        raise ValueError('message_level="%s"' % message_level)
    self._messages.append((message_level, message_text))
[ "def", "append", "(", "self", ",", "message_level", ",", "message_text", ")", ":", "if", "not", "message_level", "in", "MessagesHeader", ".", "_message_levels", ":", "raise", "ValueError", "(", "'message_level=\"%s\"'", "%", "message_level", ")", "self", ".", "_...
Adds a message level/text pair to this MessagesHeader
[ "Adds", "a", "message", "level", "/", "text", "pair", "to", "this", "MessagesHeader" ]
python
train
gmdzy2010/dingtalk_sdk_gmdzy2010
dingtalk_sdk_gmdzy2010/message_request.py
https://github.com/gmdzy2010/dingtalk_sdk_gmdzy2010/blob/b06cb1f78f89be9554dcb6101af8bc72718a9ecd/dingtalk_sdk_gmdzy2010/message_request.py#L192-L196
def get_read_user_ids(self):
    """Method to get chatid of group created."""
    self.logger.info("%s\t%s" % (self.request_method, self.request_url))
    # Missing key yields None rather than raising.
    return self.json_response.get("readUserIdList", None)
[ "def", "get_read_user_ids", "(", "self", ")", ":", "read_user_ids", "=", "self", ".", "json_response", ".", "get", "(", "\"readUserIdList\"", ",", "None", ")", "self", ".", "logger", ".", "info", "(", "\"%s\\t%s\"", "%", "(", "self", ".", "request_method", ...
Method to get chatid of group created.
[ "Method", "to", "get", "chatid", "of", "group", "created", "." ]
python
train
dmwm/DBS
Server/Python/src/dbs/business/DBSAcquisitionEra.py
https://github.com/dmwm/DBS/blob/9619bafce3783b3e77f0415f8f9a258e33dd1e6f/Server/Python/src/dbs/business/DBSAcquisitionEra.py#L42-L55
def listAcquisitionEras_CI(self, acq=''): """ Returns all acquistion eras in dbs """ try: acq = str(acq) except: dbsExceptionHandler('dbsException-invalid-input', 'aquistion_era_name given is not valid : %s'%acq) conn = self.dbi.connection() try: result = self.acqlst_ci.execute(conn, acq) return result finally: if conn:conn.close()
[ "def", "listAcquisitionEras_CI", "(", "self", ",", "acq", "=", "''", ")", ":", "try", ":", "acq", "=", "str", "(", "acq", ")", "except", ":", "dbsExceptionHandler", "(", "'dbsException-invalid-input'", ",", "'aquistion_era_name given is not valid : %s'", "%", "acq...
Returns all acquistion eras in dbs
[ "Returns", "all", "acquistion", "eras", "in", "dbs" ]
python
train
fusepy/fusepy
fusell.py
https://github.com/fusepy/fusepy/blob/5d997d6706cc0204e1b3ca679651485a7e7dda49/fusell.py#L816-L823
def write(self, req, ino, buf, off, fi): """Write data Valid replies: reply_write reply_err """ self.reply_err(req, errno.EROFS)
[ "def", "write", "(", "self", ",", "req", ",", "ino", ",", "buf", ",", "off", ",", "fi", ")", ":", "self", ".", "reply_err", "(", "req", ",", "errno", ".", "EROFS", ")" ]
Write data Valid replies: reply_write reply_err
[ "Write", "data" ]
python
train
twilio/twilio-python
twilio/rest/taskrouter/v1/workspace/activity.py
https://github.com/twilio/twilio-python/blob/c867895f55dcc29f522e6e8b8868d0d18483132f/twilio/rest/taskrouter/v1/workspace/activity.py#L137-L155
def create(self, friendly_name, available=values.unset): """ Create a new ActivityInstance :param unicode friendly_name: A human-readable name for the Activity, such as 'On Call', 'Break', 'Email', etc. :param bool available: Boolean value indicating whether the worker should be eligible to receive a Task when they occupy this Activity. :returns: Newly created ActivityInstance :rtype: twilio.rest.taskrouter.v1.workspace.activity.ActivityInstance """ data = values.of({'FriendlyName': friendly_name, 'Available': available, }) payload = self._version.create( 'POST', self._uri, data=data, ) return ActivityInstance(self._version, payload, workspace_sid=self._solution['workspace_sid'], )
[ "def", "create", "(", "self", ",", "friendly_name", ",", "available", "=", "values", ".", "unset", ")", ":", "data", "=", "values", ".", "of", "(", "{", "'FriendlyName'", ":", "friendly_name", ",", "'Available'", ":", "available", ",", "}", ")", "payload...
Create a new ActivityInstance :param unicode friendly_name: A human-readable name for the Activity, such as 'On Call', 'Break', 'Email', etc. :param bool available: Boolean value indicating whether the worker should be eligible to receive a Task when they occupy this Activity. :returns: Newly created ActivityInstance :rtype: twilio.rest.taskrouter.v1.workspace.activity.ActivityInstance
[ "Create", "a", "new", "ActivityInstance" ]
python
train
chaoss/grimoirelab-manuscripts
manuscripts2/report.py
https://github.com/chaoss/grimoirelab-manuscripts/blob/94a3ad4f11bfbcd6c5190e01cb5d3e47a5187cd9/manuscripts2/report.py#L58-L71
def create_csv(filename, csv_data, mode="w"): """ Create a CSV file with the given data and store it in the file with the given name. :param filename: name of the file to store the data in :pram csv_data: the data to be stored in the file :param mode: the mode in which we have to open the file. It can be 'w', 'a', etc. Default is 'w' """ with open(filename, mode) as f: csv_data.replace("_", r"\_") f.write(csv_data)
[ "def", "create_csv", "(", "filename", ",", "csv_data", ",", "mode", "=", "\"w\"", ")", ":", "with", "open", "(", "filename", ",", "mode", ")", "as", "f", ":", "csv_data", ".", "replace", "(", "\"_\"", ",", "r\"\\_\"", ")", "f", ".", "write", "(", "...
Create a CSV file with the given data and store it in the file with the given name. :param filename: name of the file to store the data in :pram csv_data: the data to be stored in the file :param mode: the mode in which we have to open the file. It can be 'w', 'a', etc. Default is 'w'
[ "Create", "a", "CSV", "file", "with", "the", "given", "data", "and", "store", "it", "in", "the", "file", "with", "the", "given", "name", "." ]
python
train
saltstack/salt
salt/modules/lxd.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/lxd.py#L1113-L1217
def container_migrate(name, stop_and_start=False, remote_addr=None, cert=None, key=None, verify_cert=True, src_remote_addr=None, src_cert=None, src_key=None, src_verify_cert=None): ''' Migrate a container. If the container is running, it either must be shut down first (use stop_and_start=True) or criu must be installed on the source and destination machines. For this operation both certs need to be authenticated, use :mod:`lxd.authenticate <salt.modules.lxd.authenticate` to authenticate your cert(s). name : Name of the container to migrate stop_and_start : Stop the container on the source and start it on dest remote_addr : An URL to a remote Server, you also have to give cert and key if you provide remote_addr and its a TCP Address! Examples: https://myserver.lan:8443 /var/lib/mysocket.sock cert : PEM Formatted SSL Certificate. Examples: ~/.config/lxc/client.crt key : PEM Formatted SSL Key. Examples: ~/.config/lxc/client.key verify_cert : True Wherever to verify the cert, this is by default True but in the most cases you want to set it off as LXD normaly uses self-signed certificates. CLI Example: .. 
code-block:: bash # Authorize salt '*' lxd.authenticate https://srv01:8443 <yourpass> ~/.config/lxc/client.crt ~/.config/lxc/client.key false salt '*' lxd.authenticate https://srv02:8443 <yourpass> ~/.config/lxc/client.crt ~/.config/lxc/client.key false # Migrate phpmyadmin from srv01 to srv02 salt '*' lxd.container_migrate phpmyadmin stop_and_start=true remote_addr=https://srv02:8443 cert=~/.config/lxc/client.crt key=~/.config/lxc/client.key verify_cert=False src_remote_addr=https://srv01:8443 ''' if src_cert is None: src_cert = cert if src_key is None: src_key = key if src_verify_cert is None: src_verify_cert = verify_cert container = container_get( name, src_remote_addr, src_cert, src_key, src_verify_cert, _raw=True ) dest_client = pylxd_client_get( remote_addr, cert, key, verify_cert ) for pname in container.profiles: try: dest_client.profiles.get(pname) except pylxd.exceptions.LXDAPIException: raise SaltInvocationError( 'not all the profiles from the source exist on the target' ) was_running = container.status_code == CONTAINER_STATUS_RUNNING if stop_and_start and was_running: container.stop(wait=True) try: dest_container = container.migrate(dest_client, wait=True) dest_container.profiles = container.profiles dest_container.save() except pylxd.exceptions.LXDAPIException as e: raise CommandExecutionError(six.text_type(e)) # Remove the source container container.delete(wait=True) if stop_and_start and was_running: dest_container.start(wait=True) return _pylxd_model_to_dict(dest_container)
[ "def", "container_migrate", "(", "name", ",", "stop_and_start", "=", "False", ",", "remote_addr", "=", "None", ",", "cert", "=", "None", ",", "key", "=", "None", ",", "verify_cert", "=", "True", ",", "src_remote_addr", "=", "None", ",", "src_cert", "=", ...
Migrate a container. If the container is running, it either must be shut down first (use stop_and_start=True) or criu must be installed on the source and destination machines. For this operation both certs need to be authenticated, use :mod:`lxd.authenticate <salt.modules.lxd.authenticate` to authenticate your cert(s). name : Name of the container to migrate stop_and_start : Stop the container on the source and start it on dest remote_addr : An URL to a remote Server, you also have to give cert and key if you provide remote_addr and its a TCP Address! Examples: https://myserver.lan:8443 /var/lib/mysocket.sock cert : PEM Formatted SSL Certificate. Examples: ~/.config/lxc/client.crt key : PEM Formatted SSL Key. Examples: ~/.config/lxc/client.key verify_cert : True Wherever to verify the cert, this is by default True but in the most cases you want to set it off as LXD normaly uses self-signed certificates. CLI Example: .. code-block:: bash # Authorize salt '*' lxd.authenticate https://srv01:8443 <yourpass> ~/.config/lxc/client.crt ~/.config/lxc/client.key false salt '*' lxd.authenticate https://srv02:8443 <yourpass> ~/.config/lxc/client.crt ~/.config/lxc/client.key false # Migrate phpmyadmin from srv01 to srv02 salt '*' lxd.container_migrate phpmyadmin stop_and_start=true remote_addr=https://srv02:8443 cert=~/.config/lxc/client.crt key=~/.config/lxc/client.key verify_cert=False src_remote_addr=https://srv01:8443
[ "Migrate", "a", "container", "." ]
python
train
manns/pyspread
pyspread/src/gui/_widgets.py
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_widgets.py#L404-L429
def OnDrawBackground(self, dc, rect, item, flags): """Called for drawing the background area of each item Overridden from OwnerDrawnComboBox """ # If the item is selected, or its item is even, # or if we are painting the combo control itself # then use the default rendering. if (item & 1 == 0 or flags & (wx.combo.ODCB_PAINTING_CONTROL | wx.combo.ODCB_PAINTING_SELECTED)): try: wx.combo.OwnerDrawnComboBox.OnDrawBackground(self, dc, rect, item, flags) finally: return # Otherwise, draw every other background with # different color. bg_color = get_color(config["label_color"]) dc.SetBrush(wx.Brush(bg_color)) dc.SetPen(wx.Pen(bg_color)) dc.DrawRectangleRect(rect)
[ "def", "OnDrawBackground", "(", "self", ",", "dc", ",", "rect", ",", "item", ",", "flags", ")", ":", "# If the item is selected, or its item is even,", "# or if we are painting the combo control itself", "# then use the default rendering.", "if", "(", "item", "&", "1", "=...
Called for drawing the background area of each item Overridden from OwnerDrawnComboBox
[ "Called", "for", "drawing", "the", "background", "area", "of", "each", "item" ]
python
train
openeemeter/eeweather
eeweather/stations.py
https://github.com/openeemeter/eeweather/blob/d32b7369b26edfa3ee431c60457afeb0593123a7/eeweather/stations.py#L1151-L1168
def json(self): """ Return a JSON-serializeable object containing station metadata.""" return { "elevation": self.elevation, "latitude": self.latitude, "longitude": self.longitude, "icao_code": self.icao_code, "name": self.name, "quality": self.quality, "wban_ids": self.wban_ids, "recent_wban_id": self.recent_wban_id, "climate_zones": { "iecc_climate_zone": self.iecc_climate_zone, "iecc_moisture_regime": self.iecc_moisture_regime, "ba_climate_zone": self.ba_climate_zone, "ca_climate_zone": self.ca_climate_zone, }, }
[ "def", "json", "(", "self", ")", ":", "return", "{", "\"elevation\"", ":", "self", ".", "elevation", ",", "\"latitude\"", ":", "self", ".", "latitude", ",", "\"longitude\"", ":", "self", ".", "longitude", ",", "\"icao_code\"", ":", "self", ".", "icao_code"...
Return a JSON-serializeable object containing station metadata.
[ "Return", "a", "JSON", "-", "serializeable", "object", "containing", "station", "metadata", "." ]
python
train
dourvaris/nano-python
src/nano/rpc.py
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/rpc.py#L1598-L1680
def ledger( self, account, count=None, representative=False, weight=False, pending=False, sorting=False, ): """ Returns frontier, open block, change representative block, balance, last modified timestamp from local database & block count starting at **account** up to **count** .. enable_control required .. version 8.0 required :param account: Account to return blocks for :type account: str :param count: Max number of blocks to return :type count: int :param representative: If true, returns the representative as well :type representative: bool :param weight: If true, returns the voting weight as well :type weight: bool :param pending: If true, returns the pending amount as well :type pending: bool :param sorting: If true, sorts the response by balance :type sorting: bool :raises: :py:exc:`nano.rpc.RPCException` >>> rpc.ledger( ... account="xrb_1111111111111111111111111111111111111111111111111111hifc8npp", ... count=1 ... ) { "xrb_11119gbh8hb4hj1duf7fdtfyf5s75okzxdgupgpgm1bj78ex3kgy7frt3s9n": { "frontier": "E71AF3E9DD86BBD8B4620EFA63E065B34D358CFC091ACB4E103B965F95783321", "open_block": "643B77F1ECEFBDBE1CC909872964C1DBBE23A6149BD3CEF2B50B76044659B60F", "representative_block": "643B77F1ECEFBDBE1CC909872964C1DBBE23A6149BD3CEF2B50B76044659B60F", "balance": 0, "modified_timestamp": 1511476234, "block_count": 2 } } """ account = self._process_value(account, 'account') payload = {"account": account} if count is not None: payload['count'] = self._process_value(count, 'int') if sorting: payload['sorting'] = self._process_value(sorting, 'strbool') if representative: payload['representative'] = self._process_value(representative, 'strbool') if weight: payload['weight'] = self._process_value(weight, 'strbool') if pending: payload['pending'] = self._process_value(pending, 'strbool') resp = self.call('ledger', payload) accounts = resp.get('accounts') or {} int_keys = ('balance', 'modified_timestamp', 'block_count', 'weight', 'pending') for account, frontier in accounts.items(): 
for key in int_keys: if key in frontier: frontier[key] = int(frontier[key]) return accounts
[ "def", "ledger", "(", "self", ",", "account", ",", "count", "=", "None", ",", "representative", "=", "False", ",", "weight", "=", "False", ",", "pending", "=", "False", ",", "sorting", "=", "False", ",", ")", ":", "account", "=", "self", ".", "_proce...
Returns frontier, open block, change representative block, balance, last modified timestamp from local database & block count starting at **account** up to **count** .. enable_control required .. version 8.0 required :param account: Account to return blocks for :type account: str :param count: Max number of blocks to return :type count: int :param representative: If true, returns the representative as well :type representative: bool :param weight: If true, returns the voting weight as well :type weight: bool :param pending: If true, returns the pending amount as well :type pending: bool :param sorting: If true, sorts the response by balance :type sorting: bool :raises: :py:exc:`nano.rpc.RPCException` >>> rpc.ledger( ... account="xrb_1111111111111111111111111111111111111111111111111111hifc8npp", ... count=1 ... ) { "xrb_11119gbh8hb4hj1duf7fdtfyf5s75okzxdgupgpgm1bj78ex3kgy7frt3s9n": { "frontier": "E71AF3E9DD86BBD8B4620EFA63E065B34D358CFC091ACB4E103B965F95783321", "open_block": "643B77F1ECEFBDBE1CC909872964C1DBBE23A6149BD3CEF2B50B76044659B60F", "representative_block": "643B77F1ECEFBDBE1CC909872964C1DBBE23A6149BD3CEF2B50B76044659B60F", "balance": 0, "modified_timestamp": 1511476234, "block_count": 2 } }
[ "Returns", "frontier", "open", "block", "change", "representative", "block", "balance", "last", "modified", "timestamp", "from", "local", "database", "&", "block", "count", "starting", "at", "**", "account", "**", "up", "to", "**", "count", "**" ]
python
train
major/supernova
supernova/supernova.py
https://github.com/major/supernova/blob/4a217ae53c1c05567014b047c0b6b9dea2d383b3/supernova/supernova.py#L80-L88
def check_for_bypass_url(raw_creds, nova_args): """ Return a list of extra args that need to be passed on cmdline to nova. """ if 'BYPASS_URL' in raw_creds.keys(): bypass_args = ['--bypass-url', raw_creds['BYPASS_URL']] nova_args = bypass_args + nova_args return nova_args
[ "def", "check_for_bypass_url", "(", "raw_creds", ",", "nova_args", ")", ":", "if", "'BYPASS_URL'", "in", "raw_creds", ".", "keys", "(", ")", ":", "bypass_args", "=", "[", "'--bypass-url'", ",", "raw_creds", "[", "'BYPASS_URL'", "]", "]", "nova_args", "=", "b...
Return a list of extra args that need to be passed on cmdline to nova.
[ "Return", "a", "list", "of", "extra", "args", "that", "need", "to", "be", "passed", "on", "cmdline", "to", "nova", "." ]
python
train
readbeyond/aeneas
aeneas/textfile.py
https://github.com/readbeyond/aeneas/blob/9d95535ad63eef4a98530cfdff033b8c35315ee1/aeneas/textfile.py#L560-L573
def get_subtree(self, root): """ Return a new :class:`~aeneas.textfile.TextFile` object, rooted at the given node ``root``. :param root: the root node :type root: :class:`~aeneas.tree.Tree` :rtype: :class:`~aeneas.textfile.TextFile` """ if not isinstance(root, Tree): self.log_exc(u"root is not an instance of Tree", None, True, TypeError) new_text_file = TextFile() new_text_file.fragments_tree = root return new_text_file
[ "def", "get_subtree", "(", "self", ",", "root", ")", ":", "if", "not", "isinstance", "(", "root", ",", "Tree", ")", ":", "self", ".", "log_exc", "(", "u\"root is not an instance of Tree\"", ",", "None", ",", "True", ",", "TypeError", ")", "new_text_file", ...
Return a new :class:`~aeneas.textfile.TextFile` object, rooted at the given node ``root``. :param root: the root node :type root: :class:`~aeneas.tree.Tree` :rtype: :class:`~aeneas.textfile.TextFile`
[ "Return", "a", "new", ":", "class", ":", "~aeneas", ".", "textfile", ".", "TextFile", "object", "rooted", "at", "the", "given", "node", "root", "." ]
python
train
cogeotiff/rio-tiler
rio_tiler/cbers.py
https://github.com/cogeotiff/rio-tiler/blob/09bb0fc6cee556410477f016abbae172b12c46a6/rio_tiler/cbers.py#L115-L145
def bounds(sceneid): """ Retrieve image bounds. Attributes ---------- sceneid : str CBERS sceneid. Returns ------- out : dict dictionary with image bounds. """ scene_params = _cbers_parse_scene_id(sceneid) cbers_address = "{}/{}".format(CBERS_BUCKET, scene_params["key"]) with rasterio.open( "{}/{}_BAND{}.tif".format( cbers_address, sceneid, scene_params["reference_band"] ) ) as src: wgs_bounds = transform_bounds( *[src.crs, "epsg:4326"] + list(src.bounds), densify_pts=21 ) info = {"sceneid": sceneid} info["bounds"] = list(wgs_bounds) return info
[ "def", "bounds", "(", "sceneid", ")", ":", "scene_params", "=", "_cbers_parse_scene_id", "(", "sceneid", ")", "cbers_address", "=", "\"{}/{}\"", ".", "format", "(", "CBERS_BUCKET", ",", "scene_params", "[", "\"key\"", "]", ")", "with", "rasterio", ".", "open",...
Retrieve image bounds. Attributes ---------- sceneid : str CBERS sceneid. Returns ------- out : dict dictionary with image bounds.
[ "Retrieve", "image", "bounds", "." ]
python
train
log2timeline/dftimewolf
dftimewolf/lib/collectors/grr_hosts.py
https://github.com/log2timeline/dftimewolf/blob/45f898476a288d73c4256ae8e3836a2a4848c0d7/dftimewolf/lib/collectors/grr_hosts.py#L102-L115
def _get_client_by_id(self, client_id): """Get GRR client dictionary and make sure valid approvals exist. Args: client_id: GRR client ID. Returns: GRR API Client object """ client = self.grr_api.Client(client_id) print('Checking for client approval') self._check_approval_wrapper(client, client.ListFlows) print('{0:s}: Client approval is valid'.format(client_id)) return client.Get()
[ "def", "_get_client_by_id", "(", "self", ",", "client_id", ")", ":", "client", "=", "self", ".", "grr_api", ".", "Client", "(", "client_id", ")", "print", "(", "'Checking for client approval'", ")", "self", ".", "_check_approval_wrapper", "(", "client", ",", "...
Get GRR client dictionary and make sure valid approvals exist. Args: client_id: GRR client ID. Returns: GRR API Client object
[ "Get", "GRR", "client", "dictionary", "and", "make", "sure", "valid", "approvals", "exist", "." ]
python
train
marshmallow-code/marshmallow-jsonapi
marshmallow_jsonapi/schema.py
https://github.com/marshmallow-code/marshmallow-jsonapi/blob/7183c9bb5cdeace4143e6678bab48d433ac439a1/marshmallow_jsonapi/schema.py#L303-L332
def format_error(self, field_name, message, index=None): """Override-able hook to format a single error message as an Error object. See: http://jsonapi.org/format/#error-objects """ pointer = ['/data'] if index is not None: pointer.append(str(index)) relationship = isinstance( self.declared_fields.get(field_name), BaseRelationship, ) if relationship: pointer.append('relationships') elif field_name != 'id': # JSONAPI identifier is a special field that exists above the attribute object. pointer.append('attributes') pointer.append(self.inflect(field_name)) if relationship: pointer.append('data') return { 'detail': message, 'source': { 'pointer': '/'.join(pointer), }, }
[ "def", "format_error", "(", "self", ",", "field_name", ",", "message", ",", "index", "=", "None", ")", ":", "pointer", "=", "[", "'/data'", "]", "if", "index", "is", "not", "None", ":", "pointer", ".", "append", "(", "str", "(", "index", ")", ")", ...
Override-able hook to format a single error message as an Error object. See: http://jsonapi.org/format/#error-objects
[ "Override", "-", "able", "hook", "to", "format", "a", "single", "error", "message", "as", "an", "Error", "object", "." ]
python
train
gmdzy2010/dingtalk_sdk_gmdzy2010
dingtalk_sdk_gmdzy2010/base_request.py
https://github.com/gmdzy2010/dingtalk_sdk_gmdzy2010/blob/b06cb1f78f89be9554dcb6101af8bc72718a9ecd/dingtalk_sdk_gmdzy2010/base_request.py#L22-L35
def set_logger(self): """Method to build the base logging system. By default, logging level is set to INFO.""" logger = logging.getLogger(__name__) logger.setLevel(level=logging.INFO) logger_file = os.path.join(self.logs_path, 'dingtalk_sdk.logs') logger_handler = logging.FileHandler(logger_file) logger_handler.setLevel(logging.INFO) logger_formatter = logging.Formatter( '[%(asctime)s | %(name)s | %(levelname)s] %(message)s' ) logger_handler.setFormatter(logger_formatter) logger.addHandler(logger_handler) return logger
[ "def", "set_logger", "(", "self", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "logger", ".", "setLevel", "(", "level", "=", "logging", ".", "INFO", ")", "logger_file", "=", "os", ".", "path", ".", "join", "(", "self", ...
Method to build the base logging system. By default, logging level is set to INFO.
[ "Method", "to", "build", "the", "base", "logging", "system", ".", "By", "default", "logging", "level", "is", "set", "to", "INFO", "." ]
python
train
iotile/coretools
iotilecore/iotile/core/hw/virtual/tile_based_device.py
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/virtual/tile_based_device.py#L53-L62
def stop(self): """Stop running this virtual device including any worker threads.""" for tile in self._tiles.values(): tile.signal_stop() for tile in self._tiles.values(): tile.wait_stopped() super(TileBasedVirtualDevice, self).stop()
[ "def", "stop", "(", "self", ")", ":", "for", "tile", "in", "self", ".", "_tiles", ".", "values", "(", ")", ":", "tile", ".", "signal_stop", "(", ")", "for", "tile", "in", "self", ".", "_tiles", ".", "values", "(", ")", ":", "tile", ".", "wait_sto...
Stop running this virtual device including any worker threads.
[ "Stop", "running", "this", "virtual", "device", "including", "any", "worker", "threads", "." ]
python
train
p3trus/slave
slave/quantum_design/ppms.py
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/quantum_design/ppms.py#L520-L527
def redefine_position(self, position): """Redefines the current position to the new position. :param position: The new position. """ cmd = 'MOVE', [Float, Integer] self._write(cmd, position, 2)
[ "def", "redefine_position", "(", "self", ",", "position", ")", ":", "cmd", "=", "'MOVE'", ",", "[", "Float", ",", "Integer", "]", "self", ".", "_write", "(", "cmd", ",", "position", ",", "2", ")" ]
Redefines the current position to the new position. :param position: The new position.
[ "Redefines", "the", "current", "position", "to", "the", "new", "position", "." ]
python
train
apache/airflow
airflow/contrib/hooks/opsgenie_alert_hook.py
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/opsgenie_alert_hook.py#L50-L59
def _get_api_key(self): """ Get Opsgenie api_key for creating alert """ conn = self.get_connection(self.http_conn_id) api_key = conn.password if not api_key: raise AirflowException('Opsgenie API Key is required for this hook, ' 'please check your conn_id configuration.') return api_key
[ "def", "_get_api_key", "(", "self", ")", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "http_conn_id", ")", "api_key", "=", "conn", ".", "password", "if", "not", "api_key", ":", "raise", "AirflowException", "(", "'Opsgenie API Key is requi...
Get Opsgenie api_key for creating alert
[ "Get", "Opsgenie", "api_key", "for", "creating", "alert" ]
python
test
andreikop/qutepart
qutepart/sideareas.py
https://github.com/andreikop/qutepart/blob/109d76b239751318bcef06f39b2fbbf18687a40b/qutepart/sideareas.py#L45-L76
def paintEvent(self, event): """QWidget.paintEvent() implementation """ painter = QPainter(self) painter.fillRect(event.rect(), self.palette().color(QPalette.Window)) painter.setPen(Qt.black) block = self._qpart.firstVisibleBlock() blockNumber = block.blockNumber() top = int(self._qpart.blockBoundingGeometry(block).translated(self._qpart.contentOffset()).top()) bottom = top + int(self._qpart.blockBoundingRect(block).height()) singleBlockHeight = self._qpart.cursorRect().height() boundingRect = self._qpart.blockBoundingRect(block) availableWidth = self.__width - self._RIGHT_MARGIN - self._LEFT_MARGIN availableHeight = self._qpart.fontMetrics().height() while block.isValid() and top <= event.rect().bottom(): if block.isVisible() and bottom >= event.rect().top(): number = str(blockNumber + 1) painter.drawText(self._LEFT_MARGIN, top, availableWidth, availableHeight, Qt.AlignRight, number) if boundingRect.height() >= singleBlockHeight * 2: # wrapped block painter.fillRect(1, top + singleBlockHeight, self.__width - 2, boundingRect.height() - singleBlockHeight - 2, Qt.darkGreen) block = block.next() boundingRect = self._qpart.blockBoundingRect(block) top = bottom bottom = top + int(boundingRect.height()) blockNumber += 1
[ "def", "paintEvent", "(", "self", ",", "event", ")", ":", "painter", "=", "QPainter", "(", "self", ")", "painter", ".", "fillRect", "(", "event", ".", "rect", "(", ")", ",", "self", ".", "palette", "(", ")", ".", "color", "(", "QPalette", ".", "Win...
QWidget.paintEvent() implementation
[ "QWidget", ".", "paintEvent", "()", "implementation" ]
python
train
ToucanToco/toucan-data-sdk
toucan_data_sdk/utils/postprocess/text.py
https://github.com/ToucanToco/toucan-data-sdk/blob/c3ca874e1b64f4bdcc2edda750a72d45d1561d8a/toucan_data_sdk/utils/postprocess/text.py#L452-L479
def concat( df, *, columns: List[str], new_column: str, sep: str = None ): """ Concatenate `columns` element-wise See [pandas doc]( https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.Series.str.cat.html) for more information --- ### Parameters *mandatory :* - `columns` (*list*): list of columns to concatenate (at least 2 columns) - `new_column` (*str*): the destination column *optional :* - `sep` (*str*): the separator """ if len(columns) < 2: raise ValueError('The `columns` parameter needs to have at least 2 columns') first_col, *other_cols = columns df.loc[:, new_column] = df[first_col].astype(str).str.cat(df[other_cols].astype(str), sep=sep) return df
[ "def", "concat", "(", "df", ",", "*", ",", "columns", ":", "List", "[", "str", "]", ",", "new_column", ":", "str", ",", "sep", ":", "str", "=", "None", ")", ":", "if", "len", "(", "columns", ")", "<", "2", ":", "raise", "ValueError", "(", "'The...
Concatenate `columns` element-wise See [pandas doc]( https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.Series.str.cat.html) for more information --- ### Parameters *mandatory :* - `columns` (*list*): list of columns to concatenate (at least 2 columns) - `new_column` (*str*): the destination column *optional :* - `sep` (*str*): the separator
[ "Concatenate", "columns", "element", "-", "wise", "See", "[", "pandas", "doc", "]", "(", "https", ":", "//", "pandas", ".", "pydata", ".", "org", "/", "pandas", "-", "docs", "/", "stable", "/", "reference", "/", "api", "/", "pandas", ".", "Series", "...
python
test
mozilla-releng/scriptworker
scriptworker/github.py
https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/github.py#L141-L160
def extract_github_repo_owner_and_name(url): """Given an URL, return the repo name and who owns it. Args: url (str): The URL to the GitHub repository Raises: ValueError: on url that aren't from github Returns: str, str: the owner of the repository, the repository name """ _check_github_url_is_supported(url) parts = get_parts_of_url_path(url) repo_owner = parts[0] repo_name = parts[1] return repo_owner, _strip_trailing_dot_git(repo_name)
[ "def", "extract_github_repo_owner_and_name", "(", "url", ")", ":", "_check_github_url_is_supported", "(", "url", ")", "parts", "=", "get_parts_of_url_path", "(", "url", ")", "repo_owner", "=", "parts", "[", "0", "]", "repo_name", "=", "parts", "[", "1", "]", "...
Given an URL, return the repo name and who owns it. Args: url (str): The URL to the GitHub repository Raises: ValueError: on url that aren't from github Returns: str, str: the owner of the repository, the repository name
[ "Given", "an", "URL", "return", "the", "repo", "name", "and", "who", "owns", "it", "." ]
python
train
apache/incubator-superset
superset/views/core.py
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/core.py#L1564-L1619
def tables(self, db_id, schema, substr, force_refresh='false'): """Endpoint to fetch the list of tables for given database""" db_id = int(db_id) force_refresh = force_refresh.lower() == 'true' schema = utils.js_string_to_python(schema) substr = utils.js_string_to_python(substr) database = db.session.query(models.Database).filter_by(id=db_id).one() if schema: table_names = database.all_table_names_in_schema( schema=schema, force=force_refresh, cache=database.table_cache_enabled, cache_timeout=database.table_cache_timeout) view_names = database.all_view_names_in_schema( schema=schema, force=force_refresh, cache=database.table_cache_enabled, cache_timeout=database.table_cache_timeout) else: table_names = database.all_table_names_in_database( cache=True, force=False, cache_timeout=24 * 60 * 60) view_names = database.all_view_names_in_database( cache=True, force=False, cache_timeout=24 * 60 * 60) table_names = security_manager.accessible_by_user(database, table_names, schema) view_names = security_manager.accessible_by_user(database, view_names, schema) if substr: table_names = [tn for tn in table_names if substr in tn] view_names = [vn for vn in view_names if substr in vn] if not schema and database.default_schemas: def get_schema(tbl_or_view_name): return tbl_or_view_name.split('.')[0] if '.' 
in tbl_or_view_name else None user_schema = g.user.email.split('@')[0] valid_schemas = set(database.default_schemas + [user_schema]) table_names = [tn for tn in table_names if get_schema(tn) in valid_schemas] view_names = [vn for vn in view_names if get_schema(vn) in valid_schemas] max_items = config.get('MAX_TABLE_NAMES') or len(table_names) total_items = len(table_names) + len(view_names) max_tables = len(table_names) max_views = len(view_names) if total_items and substr: max_tables = max_items * len(table_names) // total_items max_views = max_items * len(view_names) // total_items table_options = [{'value': tn, 'label': tn} for tn in table_names[:max_tables]] table_options.extend([{'value': vn, 'label': '[view] {}'.format(vn)} for vn in view_names[:max_views]]) payload = { 'tableLength': len(table_names) + len(view_names), 'options': table_options, } return json_success(json.dumps(payload))
[ "def", "tables", "(", "self", ",", "db_id", ",", "schema", ",", "substr", ",", "force_refresh", "=", "'false'", ")", ":", "db_id", "=", "int", "(", "db_id", ")", "force_refresh", "=", "force_refresh", ".", "lower", "(", ")", "==", "'true'", "schema", "...
Endpoint to fetch the list of tables for given database
[ "Endpoint", "to", "fetch", "the", "list", "of", "tables", "for", "given", "database" ]
python
train
rossant/ipymd
ipymd/lib/opendocument.py
https://github.com/rossant/ipymd/blob/d87c9ebc59d67fe78b0139ee00e0e5307682e303/ipymd/lib/opendocument.py#L358-L361
def add_styles(self, **styles): """Add ODF styles to the current document.""" for stylename in sorted(styles): self._doc.styles.addElement(styles[stylename])
[ "def", "add_styles", "(", "self", ",", "*", "*", "styles", ")", ":", "for", "stylename", "in", "sorted", "(", "styles", ")", ":", "self", ".", "_doc", ".", "styles", ".", "addElement", "(", "styles", "[", "stylename", "]", ")" ]
Add ODF styles to the current document.
[ "Add", "ODF", "styles", "to", "the", "current", "document", "." ]
python
train
google/grr
grr/server/grr_response_server/gui/api_plugins/cron.py
https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/gui/api_plugins/cron.py#L76-L82
def _IsCronJobFailing(self, cron_job): """Returns True if the last run failed.""" status = cron_job.Get(cron_job.Schema.LAST_RUN_STATUS) if status is None: return False return status.status != rdf_cronjobs.CronJobRunStatus.Status.OK
[ "def", "_IsCronJobFailing", "(", "self", ",", "cron_job", ")", ":", "status", "=", "cron_job", ".", "Get", "(", "cron_job", ".", "Schema", ".", "LAST_RUN_STATUS", ")", "if", "status", "is", "None", ":", "return", "False", "return", "status", ".", "status",...
Returns True if the last run failed.
[ "Returns", "True", "if", "the", "last", "run", "failed", "." ]
python
train
Games-and-Simulations/sc-docker
scbw/docker_utils.py
https://github.com/Games-and-Simulations/sc-docker/blob/1d7adb9b5839783655564afc4bbcd204a0055dcb/scbw/docker_utils.py#L367-L373
def remove_game_containers(name_filter: str) -> None: """ :raises docker.exceptions.APIError """ for container in docker_client.containers.list(filters={"name": name_filter}, all=True): container.stop() container.remove()
[ "def", "remove_game_containers", "(", "name_filter", ":", "str", ")", "->", "None", ":", "for", "container", "in", "docker_client", ".", "containers", ".", "list", "(", "filters", "=", "{", "\"name\"", ":", "name_filter", "}", ",", "all", "=", "True", ")",...
:raises docker.exceptions.APIError
[ ":", "raises", "docker", ".", "exceptions", ".", "APIError" ]
python
train
twisted/txaws
txaws/ec2/client.py
https://github.com/twisted/txaws/blob/5c3317376cd47e536625027e38c3b37840175ce0/txaws/ec2/client.py#L905-L921
def describe_keypairs(self, xml_bytes): """Parse the XML returned by the C{DescribeKeyPairs} function. @param xml_bytes: XML bytes with a C{DescribeKeyPairsResponse} root element. @return: a C{list} of L{Keypair}. """ results = [] root = XML(xml_bytes) keypairs = root.find("keySet") if keypairs is None: return results for keypair_data in keypairs: key_name = keypair_data.findtext("keyName") key_fingerprint = keypair_data.findtext("keyFingerprint") results.append(model.Keypair(key_name, key_fingerprint)) return results
[ "def", "describe_keypairs", "(", "self", ",", "xml_bytes", ")", ":", "results", "=", "[", "]", "root", "=", "XML", "(", "xml_bytes", ")", "keypairs", "=", "root", ".", "find", "(", "\"keySet\"", ")", "if", "keypairs", "is", "None", ":", "return", "resu...
Parse the XML returned by the C{DescribeKeyPairs} function. @param xml_bytes: XML bytes with a C{DescribeKeyPairsResponse} root element. @return: a C{list} of L{Keypair}.
[ "Parse", "the", "XML", "returned", "by", "the", "C", "{", "DescribeKeyPairs", "}", "function", "." ]
python
train
apache/airflow
airflow/contrib/hooks/azure_fileshare_hook.py
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_fileshare_hook.py#L40-L45
def get_conn(self): """Return the FileService object.""" conn = self.get_connection(self.conn_id) service_options = conn.extra_dejson return FileService(account_name=conn.login, account_key=conn.password, **service_options)
[ "def", "get_conn", "(", "self", ")", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "conn_id", ")", "service_options", "=", "conn", ".", "extra_dejson", "return", "FileService", "(", "account_name", "=", "conn", ".", "login", ",", "acco...
Return the FileService object.
[ "Return", "the", "FileService", "object", "." ]
python
test
callowayproject/django-categories
categories/models.py
https://github.com/callowayproject/django-categories/blob/3765851320a79b12c6d3306f3784a2302ea64812/categories/models.py#L56-L67
def get_absolute_url(self): """Return a path""" from django.urls import NoReverseMatch if self.alternate_url: return self.alternate_url try: prefix = reverse('categories_tree_list') except NoReverseMatch: prefix = '/' ancestors = list(self.get_ancestors()) + [self, ] return prefix + '/'.join([force_text(i.slug) for i in ancestors]) + '/'
[ "def", "get_absolute_url", "(", "self", ")", ":", "from", "django", ".", "urls", "import", "NoReverseMatch", "if", "self", ".", "alternate_url", ":", "return", "self", ".", "alternate_url", "try", ":", "prefix", "=", "reverse", "(", "'categories_tree_list'", "...
Return a path
[ "Return", "a", "path" ]
python
train
Qiskit/qiskit-terra
qiskit/quantum_info/analyzation/average.py
https://github.com/Qiskit/qiskit-terra/blob/d4f58d903bc96341b816f7c35df936d6421267d1/qiskit/quantum_info/analyzation/average.py#L13-L38
def average_data(counts, observable): """Compute the mean value of an diagonal observable. Takes in a diagonal observable in dictionary, list or matrix format and then calculates the sum_i value(i) P(i) where value(i) is the value of the observable for state i. Args: counts (dict): a dict of outcomes from an experiment observable (dict or matrix or list): The observable to be averaged over. As an example, ZZ on qubits can be given as: * dict: {"00": 1, "11": 1, "01": -1, "10": -1} * matrix: [[1, 0, 0, 0], [0, -1, 0, 0, ], [0, 0, -1, 0], [0, 0, 0, 1]] * matrix diagonal (list): [1, -1, -1, 1] Returns: Double: Average of the observable """ if not isinstance(observable, dict): observable = make_dict_observable(observable) temp = 0 tot = sum(counts.values()) for key in counts: if key in observable: temp += counts[key] * observable[key] / tot return temp
[ "def", "average_data", "(", "counts", ",", "observable", ")", ":", "if", "not", "isinstance", "(", "observable", ",", "dict", ")", ":", "observable", "=", "make_dict_observable", "(", "observable", ")", "temp", "=", "0", "tot", "=", "sum", "(", "counts", ...
Compute the mean value of an diagonal observable. Takes in a diagonal observable in dictionary, list or matrix format and then calculates the sum_i value(i) P(i) where value(i) is the value of the observable for state i. Args: counts (dict): a dict of outcomes from an experiment observable (dict or matrix or list): The observable to be averaged over. As an example, ZZ on qubits can be given as: * dict: {"00": 1, "11": 1, "01": -1, "10": -1} * matrix: [[1, 0, 0, 0], [0, -1, 0, 0, ], [0, 0, -1, 0], [0, 0, 0, 1]] * matrix diagonal (list): [1, -1, -1, 1] Returns: Double: Average of the observable
[ "Compute", "the", "mean", "value", "of", "an", "diagonal", "observable", "." ]
python
test
PyThaiNLP/pythainlp
pythainlp/util/keyboard.py
https://github.com/PyThaiNLP/pythainlp/blob/e9a300b8a99dfd1a67a955e7c06f62e4afe0fbca/pythainlp/util/keyboard.py#L104-L113
def eng_to_thai(text: str) -> str: """ Correct text in one language that is incorrectly-typed with a keyboard layout in another language. (type Thai with English keyboard) :param str text: Incorrect input (type Thai with English keyboard) :return: Thai text """ return "".join( [EN_TH_KEYB_PAIRS[ch] if (ch in EN_TH_KEYB_PAIRS) else ch for ch in text] )
[ "def", "eng_to_thai", "(", "text", ":", "str", ")", "->", "str", ":", "return", "\"\"", ".", "join", "(", "[", "EN_TH_KEYB_PAIRS", "[", "ch", "]", "if", "(", "ch", "in", "EN_TH_KEYB_PAIRS", ")", "else", "ch", "for", "ch", "in", "text", "]", ")" ]
Correct text in one language that is incorrectly-typed with a keyboard layout in another language. (type Thai with English keyboard) :param str text: Incorrect input (type Thai with English keyboard) :return: Thai text
[ "Correct", "text", "in", "one", "language", "that", "is", "incorrectly", "-", "typed", "with", "a", "keyboard", "layout", "in", "another", "language", ".", "(", "type", "Thai", "with", "English", "keyboard", ")" ]
python
train
troeger/opensubmit
web/opensubmit/cmdline.py
https://github.com/troeger/opensubmit/blob/384a95b7c6fa41e3f949a129d25dafd9a1c54859/web/opensubmit/cmdline.py#L294-L386
def console_script(fsroot=''): ''' The main entry point for the production administration script 'opensubmit-web'. The argument allows the test suite to override the root of all paths used in here. ''' parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter, description='Administration for the OpenSubmit web application.') parser.add_argument('-c', '--config', default='/etc/opensubmit/settings.ini', help='OpenSubmit configuration file.') subparsers = parser.add_subparsers(dest='command', help='Supported administrative actions.') parser_configcreate = subparsers.add_parser('configcreate', help='Create initial config files for the OpenSubmit web server.') parser_configcreate.add_argument('--debug', default=is_str_true(os.environ.get('OPENSUBMIT_DEBUG', 'False')), action='store_true', help='Enable debug mode, not for production systems.') parser_configcreate.add_argument('--server_url', default=os.environ.get('OPENSUBMIT_SERVER_URL', 'http://localhost:8000'), help='The main URL of the OpenSubmit installation, including sub-directories.') parser_configcreate.add_argument('--server_mediaroot', default=os.environ.get('OPENSUBMIT_SERVER_MEDIAROOT', '/tmp/'), help='Storage path for uploadeded files.') parser_configcreate.add_argument('--server_hostaliases', default=os.environ.get('OPENSUBMIT_SERVER_HOSTALIASES', '127.0.0.1'), help='Comma-separated list of alternative host names for the web server.') parser_configcreate.add_argument('--server_logfile', default=os.environ.get('OPENSUBMIT_SERVER_LOGFILE', '/tmp/opensubmit.log'), help='Log file for the OpenSubmit application.') parser_configcreate.add_argument('--server_timezone', default=os.environ.get('OPENSUBMIT_SERVER_TIMEZONE', 'Europe/Berlin'), help='Time zone for all dates and deadlines.') parser_configcreate.add_argument('--database_name', default=os.environ.get('OPENSUBMIT_DATABASE_NAME', '/tmp/database.sqlite'), help='Name of the database (file).'), 
parser_configcreate.add_argument('--database_engine', default=os.environ.get('OPENSUBMIT_DATABASE_ENGINE', 'sqlite3'), choices=['postgresql', 'mysql', 'sqlite3', 'oracle']) parser_configcreate.add_argument('--database_user', default=os.environ.get('OPENSUBMIT_DATABASE_USER', ''), help='The user name for accessing the database. Not needed for SQLite.') parser_configcreate.add_argument('--database_password', default=os.environ.get('OPENSUBMIT_DATABASE_PASSWORD', ''), help='The user password for accessing the database. Not needed for SQLite.') parser_configcreate.add_argument('--database_host', default=os.environ.get('OPENSUBMIT_DATABASE_HOST', ''), help='The host name for accessing the database. Not needed for SQLite. Default is localhost.') parser_configcreate.add_argument('--database_port', default=os.environ.get('OPENSUBMIT_DATABASE_PORT', ''), help='The port number for accessing the database. Not needed for SQLite.') parser_configcreate.add_argument('--login_google_oauth_key', default=os.environ.get('OPENSUBMIT_LOGIN_GOOGLE_OAUTH_KEY', ''), help='Google OAuth client key.') parser_configcreate.add_argument('--login_google_oauth_secret', default=os.environ.get('OPENSUBMIT_LOGIN_GOOGLE_OAUTH_SECRET', ''), help='Google OAuth client secret.') parser_configcreate.add_argument('--login_twitter_oauth_key', default=os.environ.get('OPENSUBMIT_LOGIN_TWITTER_OAUTH_KEY', ''), help='Twitter OAuth client key.') parser_configcreate.add_argument('--login_twitter_oauth_secret', default=os.environ.get('OPENSUBMIT_LOGIN_TWITTER_OAUTH_SECRET', ''), help='Twitter OAuth client secret.') parser_configcreate.add_argument('--login_github_oauth_key', default=os.environ.get('OPENSUBMIT_LOGIN_GITHUB_OAUTH_KEY', ''), help='GitHub OAuth client key.') parser_configcreate.add_argument('--login_github_oauth_secret', default=os.environ.get('OPENSUBMIT_LOGIN_GITHUB_OAUTH_SECRET', ''), help='GitHUb OAuth client secret.') parser_configcreate.add_argument('--login_gitlab_description', 
default=os.environ.get('OPENSUBMIT_LOGIN_GITLAB_DESCRIPTION', ''), help='Title of the GitLab login button.') parser_configcreate.add_argument('--login_gitlab_oauth_key', default=os.environ.get('OPENSUBMIT_LOGIN_GITLAB_OAUTH_KEY', ''), help='GitLab OAuth client key.') parser_configcreate.add_argument('--login_gitlab_oauth_secret', default=os.environ.get('OPENSUBMIT_LOGIN_GITLAB_OAUTH_SECRET', ''), help='GitLab OAuth client secret.') parser_configcreate.add_argument('--login_gitlab_url', default=os.environ.get('OPENSUBMIT_LOGIN_GITLAB_URL', ''), help='GitLab URL.') parser_configcreate.add_argument('--login_openid_description', default=os.environ.get('OPENSUBMIT_LOGIN_OPENID_DESCRIPTION', 'StackExchange'), help='Title of the OpenID login button.') parser_configcreate.add_argument('--login_openid_provider', default=os.environ.get('OPENSUBMIT_LOGIN_OPENID_PROVIDER', 'https://openid.stackexchange.com'), help='URL of the OpenID provider.') parser_configcreate.add_argument('--login_oidc_description', default=os.environ.get('OPENSUBMIT_LOGIN_OIDC_DESCRIPTION', ''), help='Title of the OpenID Connect login button.') parser_configcreate.add_argument('--login_oidc_endpoint', default=os.environ.get('OPENSUBMIT_LOGIN_OIDC_ENDPOINT', ''), help='URL of the OpenID Connect endpoint.') parser_configcreate.add_argument('--login_oidc_client_id', default=os.environ.get('OPENSUBMIT_LOGIN_OIDC_CLIENT_ID', ''), help='OpenID Connect client id.') parser_configcreate.add_argument('--login_oidc_client_secret', default=os.environ.get('OPENSUBMIT_LOGIN_OIDC_CLIENT_SECRET', ''), help='OpenID Connect client secret.') parser_configcreate.add_argument('--login_shib_description', default=os.environ.get('OPENSUBMIT_LOGIN_SHIB_DESCRIPTION', ''), help='Title of the Shibboleth login button.') parser_configcreate.add_argument('--login_demo', default=is_str_true(os.environ.get('OPENSUBMIT_LOGIN_DEMO', 'False')), action='store_true', help='Offer demo login options.') 
parser_configcreate.add_argument('--admin_name', default=os.environ.get('OPENSUBMIT_ADMIN_NAME', 'OpenSubmit Administrator'), help='Name of the administrator, shown in privacy policy, impress and backend.') parser_configcreate.add_argument('--admin_email', default=os.environ.get('OPENSUBMIT_ADMIN_EMAIL', 'root@localhost'), help='eMail of the administrator, shown in privacy policy, impress and backend.') parser_configcreate.add_argument('--admin_address', default=os.environ.get('OPENSUBMIT_ADMIN_ADDRESS', '(address available by eMail)'), help='Address of the administrator, shown in privacy policy and impress.') parser_configcreate.add_argument('--admin_impress_page', default=os.environ.get('OPENSUBMIT_IMPRESS_PAGE', ''), help='Link to alternative impress page.') parser_configcreate.add_argument('--admin_privacy_page', default=os.environ.get('OPENSUBMIT_PRIVACY_PAGE', ''), help='Link to alternative privacy policy page.') parser_configcreate.add_argument('--whitelist_openid', default=os.environ.get('OPENSUBMIT_WHITELIST_OPENID', ''), help='Comma-separated list of allowed email addresses for OpenID login.') parser_configcreate.add_argument('--whitelist_twitter', default=os.environ.get('OPENSUBMIT_WHITELIST_TWITTER', ''), help='Comma-separated list of allowed email addresses for Twitter login.') parser_configcreate.add_argument('--whitelist_google', default=os.environ.get('OPENSUBMIT_WHITELIST_GOOGLE', ''), help='Comma-separated list of allowed email addresses for Google login.') parser_configcreate.add_argument('--whitelist_github', default=os.environ.get('OPENSUBMIT_WHITELIST_GITHUB', ''), help='Comma-separated list of allowed email addresses for GitHub login.') parser_configcreate.add_argument('--whitelist_gitlab', default=os.environ.get('OPENSUBMIT_WHITELIST_GITLAB', ''), help='Comma-separated list of allowed email addresses for GitLab login.') parser_configcreate.add_argument('--whitelist_oidc', default=os.environ.get('OPENSUBMIT_WHITELIST_OIDC', ''), 
help='Comma-separated list of allowed email addresses for OpenID connect login.') parser_configcreate.add_argument('--whitelist_shib', default=os.environ.get('OPENSUBMIT_WHITELIST_SHIB', ''), help='Comma-separated list of allowed email addresses for Shibboleth login.') subparsers.add_parser('configtest', aliases=['configure'], help='Check config files and database for correct installation of the OpenSubmit web server.') subparsers.add_parser('democreate', aliases=['createdemo'], help='Install some test data (courses, assignments, users).') subparsers.add_parser('apachecreate', help='Create config file snippet for Apache 2.4.') subparsers.add_parser('fixperms', help='Check and fix student and tutor permissions.') subparsers.add_parser('configdump', aliases=['dumpconfig'], help='Show effective OpenSubmit configuration at run-time.') subparsers.add_parser('fixchecksums', help='Re-create all student file checksums (for duplicate detection).') parser_makeadmin = subparsers.add_parser('makeadmin', help='Make this user an admin with backend rights.') parser_makeadmin.add_argument('email') parser_makeowner = subparsers.add_parser('makeowner', help='Make this user a course owner with backend rights.') parser_makeowner.add_argument('email') parser_maketutor = subparsers.add_parser('maketutor', help='Make this user a course tutor with backend rights.') parser_maketutor.add_argument('email') parser_makestudent = subparsers.add_parser('makestudent', help='Make this user a student without backend rights.') parser_makestudent.add_argument('email') args = parser.parse_args() config_file = fsroot + args.config if args.command == 'apachecreate': config = check_web_config(config_file) if config: apache_config(config, os.path.dirname(config_file) + os.sep + 'apache24.conf') return if args.command == 'configcreate': configcreate(config_file, vars(args)) return if args.command == 'configtest': configtest(config_file) return if args.command in ['fixperms', 'fixchecksums', 'democreate', 
'createdemo', 'dumpconfig', 'configdump']: django_admin([args.command]) return if args.command in ['makeadmin', 'makeowner', 'maketutor', 'makestudent']: django_admin([args.command, args.email]) return
[ "def", "console_script", "(", "fsroot", "=", "''", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ",", "description", "=", "'Administration for the OpenSubmit web application.'", ...
The main entry point for the production administration script 'opensubmit-web'. The argument allows the test suite to override the root of all paths used in here.
[ "The", "main", "entry", "point", "for", "the", "production", "administration", "script", "opensubmit", "-", "web", ".", "The", "argument", "allows", "the", "test", "suite", "to", "override", "the", "root", "of", "all", "paths", "used", "in", "here", "." ]
python
train
trailofbits/manticore
manticore/utils/config.py
https://github.com/trailofbits/manticore/blob/54c5a15b1119c523ae54c09972413e8b97f11629/manticore/utils/config.py#L194-L209
def save(f): """ Save current config state to an yml file stream identified by |f| :param f: where to write the config file """ global _groups c = {} for group_name, group in _groups.items(): section = {var.name: var.value for var in group.updated_vars()} if not section: continue c[group_name] = section yaml.safe_dump(c, f, line_break=True)
[ "def", "save", "(", "f", ")", ":", "global", "_groups", "c", "=", "{", "}", "for", "group_name", ",", "group", "in", "_groups", ".", "items", "(", ")", ":", "section", "=", "{", "var", ".", "name", ":", "var", ".", "value", "for", "var", "in", ...
Save current config state to an yml file stream identified by |f| :param f: where to write the config file
[ "Save", "current", "config", "state", "to", "an", "yml", "file", "stream", "identified", "by", "|f|" ]
python
valid
JdeRobot/base
src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v20/ardupilotmega.py
https://github.com/JdeRobot/base/blob/303b18992785b2fe802212f2d758a60873007f1f/src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v20/ardupilotmega.py#L10207-L10216
def wind_send(self, direction, speed, speed_z, force_mavlink1=False): ''' Wind estimation direction : wind direction that wind is coming from (degrees) (float) speed : wind speed in ground plane (m/s) (float) speed_z : vertical wind speed (m/s) (float) ''' return self.send(self.wind_encode(direction, speed, speed_z), force_mavlink1=force_mavlink1)
[ "def", "wind_send", "(", "self", ",", "direction", ",", "speed", ",", "speed_z", ",", "force_mavlink1", "=", "False", ")", ":", "return", "self", ".", "send", "(", "self", ".", "wind_encode", "(", "direction", ",", "speed", ",", "speed_z", ")", ",", "f...
Wind estimation direction : wind direction that wind is coming from (degrees) (float) speed : wind speed in ground plane (m/s) (float) speed_z : vertical wind speed (m/s) (float)
[ "Wind", "estimation" ]
python
train
apache/incubator-mxnet
python/mxnet/gluon/trainer.py
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/python/mxnet/gluon/trainer.py#L159-L167
def _reset_kvstore(self): """Reset kvstore.""" if self._kvstore and 'dist' in self._kvstore.type: raise RuntimeError("Cannot reset distributed KVStore.") self._kv_initialized = False self._kvstore = None self._distributed = None self._update_on_kvstore = None self._params_to_init = [param for param in self._params]
[ "def", "_reset_kvstore", "(", "self", ")", ":", "if", "self", ".", "_kvstore", "and", "'dist'", "in", "self", ".", "_kvstore", ".", "type", ":", "raise", "RuntimeError", "(", "\"Cannot reset distributed KVStore.\"", ")", "self", ".", "_kv_initialized", "=", "F...
Reset kvstore.
[ "Reset", "kvstore", "." ]
python
train
mixmastamyk/console
console/proximity.py
https://github.com/mixmastamyk/console/blob/afe6c95d5a7b83d85376f450454e3769e4a5c3d0/console/proximity.py#L57-L74
def build_color_tables(base=color_tables.vga_palette4): ''' Create the color tables for palette downgrade support, starting with the platform-specific 16 from the color tables module. Save as global state. :-/ ''' base = [] if base is None else base # make sure we have them before clearing table4 = _build_color_table(base, extended=False) if table4: color_table4.clear() color_table4.extend(table4) table8 = _build_color_table(base) if table8: color_table8.clear() color_table8.extend(table8)
[ "def", "build_color_tables", "(", "base", "=", "color_tables", ".", "vga_palette4", ")", ":", "base", "=", "[", "]", "if", "base", "is", "None", "else", "base", "# make sure we have them before clearing", "table4", "=", "_build_color_table", "(", "base", ",", "e...
Create the color tables for palette downgrade support, starting with the platform-specific 16 from the color tables module. Save as global state. :-/
[ "Create", "the", "color", "tables", "for", "palette", "downgrade", "support", "starting", "with", "the", "platform", "-", "specific", "16", "from", "the", "color", "tables", "module", ".", "Save", "as", "global", "state", ".", ":", "-", "/" ]
python
train
astrocatalogs/astrocats
astrocats/catalog/utils/imports.py
https://github.com/astrocatalogs/astrocats/blob/11abc3131c6366ecd23964369e55ff264add7805/astrocats/catalog/utils/imports.py#L37-L60
def compress_gz(fname): """Compress the file with the given name and delete the uncompressed file. The compressed filename is simply the input filename with '.gz' appended. Arguments --------- fname : str Name of the file to compress and delete. Returns ------- comp_fname : str Name of the compressed file produced. Equal to `fname + '.gz'`. """ import shutil import gzip comp_fname = fname + '.gz' with codecs.open(fname, 'rb') as f_in, gzip.open( comp_fname, 'wb') as f_out: shutil.copyfileobj(f_in, f_out) os.remove(fname) return comp_fname
[ "def", "compress_gz", "(", "fname", ")", ":", "import", "shutil", "import", "gzip", "comp_fname", "=", "fname", "+", "'.gz'", "with", "codecs", ".", "open", "(", "fname", ",", "'rb'", ")", "as", "f_in", ",", "gzip", ".", "open", "(", "comp_fname", ",",...
Compress the file with the given name and delete the uncompressed file. The compressed filename is simply the input filename with '.gz' appended. Arguments --------- fname : str Name of the file to compress and delete. Returns ------- comp_fname : str Name of the compressed file produced. Equal to `fname + '.gz'`.
[ "Compress", "the", "file", "with", "the", "given", "name", "and", "delete", "the", "uncompressed", "file", "." ]
python
train
DLR-RM/RAFCON
source/rafcon/utils/geometry.py
https://github.com/DLR-RM/RAFCON/blob/24942ef1a904531f49ab8830a1dbb604441be498/source/rafcon/utils/geometry.py#L48-L67
def point_on_line(point, line_start, line_end, accuracy=50.): """Checks whether a point lies on a line The function checks whether the point "point" (P) lies on the line defined by its starting point line_start (A) and its end point line_end (B). This is done by comparing the distance of [AB] with the sum of the distances [AP] and [PB]. If the difference is smaller than [AB] / accuracy, the point P is assumed to be on the line. By increasing the value of accuracy (the default is 50), the tolerance is decreased. :param point: Point to be checked (tuple with x any y coordinate) :param line_start: Starting point of the line (tuple with x any y coordinate) :param line_end: End point of the line (tuple with x any y coordinate) :param accuracy: The higher this value, the less distance is tolerated :return: True if the point is one the line, False if not """ length = dist(line_start, line_end) ds = length / float(accuracy) if -ds < (dist(line_start, point) + dist(point, line_end) - length) < ds: return True return False
[ "def", "point_on_line", "(", "point", ",", "line_start", ",", "line_end", ",", "accuracy", "=", "50.", ")", ":", "length", "=", "dist", "(", "line_start", ",", "line_end", ")", "ds", "=", "length", "/", "float", "(", "accuracy", ")", "if", "-", "ds", ...
Checks whether a point lies on a line The function checks whether the point "point" (P) lies on the line defined by its starting point line_start (A) and its end point line_end (B). This is done by comparing the distance of [AB] with the sum of the distances [AP] and [PB]. If the difference is smaller than [AB] / accuracy, the point P is assumed to be on the line. By increasing the value of accuracy (the default is 50), the tolerance is decreased. :param point: Point to be checked (tuple with x any y coordinate) :param line_start: Starting point of the line (tuple with x any y coordinate) :param line_end: End point of the line (tuple with x any y coordinate) :param accuracy: The higher this value, the less distance is tolerated :return: True if the point is one the line, False if not
[ "Checks", "whether", "a", "point", "lies", "on", "a", "line" ]
python
train
materialsvirtuallab/monty
monty/termcolor.py
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/termcolor.py#L196-L234
def get_terminal_size(): """" Return the size of the terminal as (nrow, ncols) Based on: http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python """ try: rc = os.popen('stty size', 'r').read().split() return int(rc[0]), int(rc[1]) except: pass env = os.environ def ioctl_GWINSZ(fd): try: import fcntl, termios, struct rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) return rc except: return None rc = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) if not rc: try: fd = os.open(os.ctermid(), os.O_RDONLY) rc = ioctl_GWINSZ(fd) os.close(fd) except: pass if not rc: rc = (env.get('LINES', 25), env.get('COLUMNS', 80)) return int(rc[0]), int(rc[1])
[ "def", "get_terminal_size", "(", ")", ":", "try", ":", "rc", "=", "os", ".", "popen", "(", "'stty size'", ",", "'r'", ")", ".", "read", "(", ")", ".", "split", "(", ")", "return", "int", "(", "rc", "[", "0", "]", ")", ",", "int", "(", "rc", "...
Return the size of the terminal as (nrow, ncols) Based on: http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python
[ "Return", "the", "size", "of", "the", "terminal", "as", "(", "nrow", "ncols", ")" ]
python
train
learningequality/ricecooker
ricecooker/utils/metadata_provider.py
https://github.com/learningequality/ricecooker/blob/2f0385282500cb77ef2894646c6f9ce11bd7a853/ricecooker/utils/metadata_provider.py#L294-L308
def _map_channel_row_to_dict(self, row): """ Convert dictionary keys from raw csv format (see CHANNEL_INFO_HEADER), to ricecooker-like keys, e.g., ''Source ID' --> 'source_id' """ channel_cleaned = _clean_dict(row) channel_dict = dict( title=channel_cleaned[CHANNEL_TITLE_KEY], description=channel_cleaned[CHANNEL_DESCRIPTION_KEY], source_domain=channel_cleaned[CHANNEL_DOMAIN_KEY], source_id=channel_cleaned[CHANNEL_SOURCEID_KEY], language=channel_cleaned[CHANNEL_LANGUAGE_KEY], thumbnail_chan_path=channel_cleaned[CHANNEL_THUMBNAIL_KEY] ) return channel_dict
[ "def", "_map_channel_row_to_dict", "(", "self", ",", "row", ")", ":", "channel_cleaned", "=", "_clean_dict", "(", "row", ")", "channel_dict", "=", "dict", "(", "title", "=", "channel_cleaned", "[", "CHANNEL_TITLE_KEY", "]", ",", "description", "=", "channel_clea...
Convert dictionary keys from raw csv format (see CHANNEL_INFO_HEADER), to ricecooker-like keys, e.g., ''Source ID' --> 'source_id'
[ "Convert", "dictionary", "keys", "from", "raw", "csv", "format", "(", "see", "CHANNEL_INFO_HEADER", ")", "to", "ricecooker", "-", "like", "keys", "e", ".", "g", ".", "Source", "ID", "--", ">", "source_id" ]
python
train
sebdah/dynamic-dynamodb
dynamic_dynamodb/statistics/table.py
https://github.com/sebdah/dynamic-dynamodb/blob/bfd0ca806b1c3301e724696de90ef0f973410493/dynamic_dynamodb/statistics/table.py#L340-L378
def __get_aws_metric(table_name, lookback_window_start, lookback_period, metric_name): """ Returns a metric list from the AWS CloudWatch service, may return None if no metric exists :type table_name: str :param table_name: Name of the DynamoDB table :type lookback_window_start: int :param lookback_window_start: How many minutes to look at :type lookback_period: int :type lookback_period: Length of the lookback period in minutes :type metric_name: str :param metric_name: Name of the metric to retrieve from CloudWatch :returns: list -- A list of time series data for the given metric, may be None if there was no data """ try: now = datetime.utcnow() start_time = now - timedelta(minutes=lookback_window_start) end_time = now - timedelta( minutes=lookback_window_start - lookback_period) return cloudwatch_connection.get_metric_statistics( period=lookback_period * 60, start_time=start_time, end_time=end_time, metric_name=metric_name, namespace='AWS/DynamoDB', statistics=['Sum'], dimensions={'TableName': table_name}, unit='Count') except BotoServerError as error: logger.error( 'Unknown boto error. Status: "{0}". ' 'Reason: "{1}". Message: {2}'.format( error.status, error.reason, error.message)) raise
[ "def", "__get_aws_metric", "(", "table_name", ",", "lookback_window_start", ",", "lookback_period", ",", "metric_name", ")", ":", "try", ":", "now", "=", "datetime", ".", "utcnow", "(", ")", "start_time", "=", "now", "-", "timedelta", "(", "minutes", "=", "l...
Returns a metric list from the AWS CloudWatch service, may return None if no metric exists :type table_name: str :param table_name: Name of the DynamoDB table :type lookback_window_start: int :param lookback_window_start: How many minutes to look at :type lookback_period: int :type lookback_period: Length of the lookback period in minutes :type metric_name: str :param metric_name: Name of the metric to retrieve from CloudWatch :returns: list -- A list of time series data for the given metric, may be None if there was no data
[ "Returns", "a", "metric", "list", "from", "the", "AWS", "CloudWatch", "service", "may", "return", "None", "if", "no", "metric", "exists" ]
python
train
PierreRust/apigpio
apigpio/apigpio.py
https://github.com/PierreRust/apigpio/blob/2b969f40e06219b43a43498d8baf87f5935ceab2/apigpio/apigpio.py#L802-L814
def set_pull_up_down(self, gpio, pud): """ Sets or clears the internal GPIO pull-up/down resistor. gpio:= 0-53. pud:= PUD_UP, PUD_DOWN, PUD_OFF. ... yield from pi.set_pull_up_down(17, apigpio.PUD_OFF) yield from pi.set_pull_up_down(23, apigpio.PUD_UP) yield from pi.set_pull_up_down(24, apigpio.PUD_DOWN) ... """ res = yield from self._pigpio_aio_command(_PI_CMD_PUD, gpio, pud) return _u2i(res)
[ "def", "set_pull_up_down", "(", "self", ",", "gpio", ",", "pud", ")", ":", "res", "=", "yield", "from", "self", ".", "_pigpio_aio_command", "(", "_PI_CMD_PUD", ",", "gpio", ",", "pud", ")", "return", "_u2i", "(", "res", ")" ]
Sets or clears the internal GPIO pull-up/down resistor. gpio:= 0-53. pud:= PUD_UP, PUD_DOWN, PUD_OFF. ... yield from pi.set_pull_up_down(17, apigpio.PUD_OFF) yield from pi.set_pull_up_down(23, apigpio.PUD_UP) yield from pi.set_pull_up_down(24, apigpio.PUD_DOWN) ...
[ "Sets", "or", "clears", "the", "internal", "GPIO", "pull", "-", "up", "/", "down", "resistor", ".", "gpio", ":", "=", "0", "-", "53", ".", "pud", ":", "=", "PUD_UP", "PUD_DOWN", "PUD_OFF", ".", "...", "yield", "from", "pi", ".", "set_pull_up_down", "...
python
train
tBuLi/symfit
symfit/core/leastsqbound.py
https://github.com/tBuLi/symfit/blob/759dd3d1d4270510d651f40b23dd26b1b10eee83/symfit/core/leastsqbound.py#L30-L42
def _internal2external_func(bounds): """ Make a function which converts between internal (unconstrained) and external (constrained) parameters. """ ls = [_internal2external_lambda(b) for b in bounds] def convert_i2e(xi): xe = empty_like(xi) xe[:] = [l(p) for l, p in zip(ls, xi)] return xe return convert_i2e
[ "def", "_internal2external_func", "(", "bounds", ")", ":", "ls", "=", "[", "_internal2external_lambda", "(", "b", ")", "for", "b", "in", "bounds", "]", "def", "convert_i2e", "(", "xi", ")", ":", "xe", "=", "empty_like", "(", "xi", ")", "xe", "[", ":", ...
Make a function which converts between internal (unconstrained) and external (constrained) parameters.
[ "Make", "a", "function", "which", "converts", "between", "internal", "(", "unconstrained", ")", "and", "external", "(", "constrained", ")", "parameters", "." ]
python
train
log2timeline/plaso
plaso/parsers/bash_history.py
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/parsers/bash_history.py#L84-L96
def VerifyStructure(self, parser_mediator, lines): """Verifies that this is a bash history file. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. lines (str): one or more lines from the text file. Returns: bool: True if this is the correct parser, False otherwise. """ match_generator = self._VERIFICATION_GRAMMAR.scanString(lines, maxMatches=1) return bool(list(match_generator))
[ "def", "VerifyStructure", "(", "self", ",", "parser_mediator", ",", "lines", ")", ":", "match_generator", "=", "self", ".", "_VERIFICATION_GRAMMAR", ".", "scanString", "(", "lines", ",", "maxMatches", "=", "1", ")", "return", "bool", "(", "list", "(", "match...
Verifies that this is a bash history file. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. lines (str): one or more lines from the text file. Returns: bool: True if this is the correct parser, False otherwise.
[ "Verifies", "that", "this", "is", "a", "bash", "history", "file", "." ]
python
train
twisted/mantissa
axiom/plugins/mantissacmd.py
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/axiom/plugins/mantissacmd.py#L141-L203
def _createCert(self, hostname, serial): """ Create a self-signed X.509 certificate. @type hostname: L{unicode} @param hostname: The hostname this certificate should be valid for. @type serial: L{int} @param serial: The serial number the certificate should have. @rtype: L{bytes} @return: The serialized certificate in PEM format. """ privateKey = rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend()) publicKey = privateKey.public_key() name = x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, hostname)]) certificate = ( x509.CertificateBuilder() .subject_name(name) .issuer_name(name) .not_valid_before(datetime.today() - timedelta(days=1)) .not_valid_after(datetime.today() + timedelta(days=365)) .serial_number(serial) .public_key(publicKey) .add_extension( x509.BasicConstraints(ca=False, path_length=None), critical=True) .add_extension( x509.SubjectAlternativeName([ x509.DNSName(hostname)]), critical=False) .add_extension( x509.KeyUsage( digital_signature=True, content_commitment=False, key_encipherment=True, data_encipherment=False, key_agreement=False, key_cert_sign=False, crl_sign=False, encipher_only=False, decipher_only=False), critical=True) .add_extension( x509.ExtendedKeyUsage([ ExtendedKeyUsageOID.SERVER_AUTH]), critical=False) .sign( private_key=privateKey, algorithm=hashes.SHA256(), backend=default_backend())) return '\n'.join([ privateKey.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption()), certificate.public_bytes( encoding=serialization.Encoding.PEM), ])
[ "def", "_createCert", "(", "self", ",", "hostname", ",", "serial", ")", ":", "privateKey", "=", "rsa", ".", "generate_private_key", "(", "public_exponent", "=", "65537", ",", "key_size", "=", "2048", ",", "backend", "=", "default_backend", "(", ")", ")", "...
Create a self-signed X.509 certificate. @type hostname: L{unicode} @param hostname: The hostname this certificate should be valid for. @type serial: L{int} @param serial: The serial number the certificate should have. @rtype: L{bytes} @return: The serialized certificate in PEM format.
[ "Create", "a", "self", "-", "signed", "X", ".", "509", "certificate", "." ]
python
train
unt-libraries/pyuntl
pyuntl/form_logic.py
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/form_logic.py#L101-L105
def get_group_usage_link(self): """Get the usage link for the group element.""" first_element = self.group_list[0] usage_link = getattr(first_element.form, 'usage_link', None) return usage_link
[ "def", "get_group_usage_link", "(", "self", ")", ":", "first_element", "=", "self", ".", "group_list", "[", "0", "]", "usage_link", "=", "getattr", "(", "first_element", ".", "form", ",", "'usage_link'", ",", "None", ")", "return", "usage_link" ]
Get the usage link for the group element.
[ "Get", "the", "usage", "link", "for", "the", "group", "element", "." ]
python
train
AndrewAnnex/SpiceyPy
spiceypy/spiceypy.py
https://github.com/AndrewAnnex/SpiceyPy/blob/fc20a9b9de68b58eed5b332f0c051fb343a6e335/spiceypy/spiceypy.py#L1603-L1618
def cvpool(agent): """ Indicate whether or not any watched kernel variables that have a specified agent on their notification list have been updated. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cvpool_c.html :param agent: Name of the agent to check for notices. :type agent: str :return: True if variables for "agent" have been updated. :rtype: bool """ agent = stypes.stringToCharP(agent) update = ctypes.c_int() libspice.cvpool_c(agent, ctypes.byref(update)) return bool(update.value)
[ "def", "cvpool", "(", "agent", ")", ":", "agent", "=", "stypes", ".", "stringToCharP", "(", "agent", ")", "update", "=", "ctypes", ".", "c_int", "(", ")", "libspice", ".", "cvpool_c", "(", "agent", ",", "ctypes", ".", "byref", "(", "update", ")", ")"...
Indicate whether or not any watched kernel variables that have a specified agent on their notification list have been updated. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cvpool_c.html :param agent: Name of the agent to check for notices. :type agent: str :return: True if variables for "agent" have been updated. :rtype: bool
[ "Indicate", "whether", "or", "not", "any", "watched", "kernel", "variables", "that", "have", "a", "specified", "agent", "on", "their", "notification", "list", "have", "been", "updated", "." ]
python
train
spotify/luigi
luigi/configuration/cfg_parser.py
https://github.com/spotify/luigi/blob/c5eca1c3c3ee2a7eb612486192a0da146710a1e9/luigi/configuration/cfg_parser.py#L156-L183
def _get_with_default(self, method, section, option, default, expected_type=None, **kwargs): """ Gets the value of the section/option using method. Returns default if value is not found. Raises an exception if the default value is not None and doesn't match the expected_type. """ try: try: # Underscore-style is the recommended configuration style option = option.replace('-', '_') return method(self, section, option, **kwargs) except (NoOptionError, NoSectionError): # Support dash-style option names (with deprecation warning). option_alias = option.replace('_', '-') value = method(self, section, option_alias, **kwargs) warn = 'Configuration [{s}] {o} (with dashes) should be avoided. Please use underscores: {u}.'.format( s=section, o=option_alias, u=option) warnings.warn(warn, DeprecationWarning) return value except (NoOptionError, NoSectionError): if default is LuigiConfigParser.NO_DEFAULT: raise if expected_type is not None and default is not None and \ not isinstance(default, expected_type): raise return default
[ "def", "_get_with_default", "(", "self", ",", "method", ",", "section", ",", "option", ",", "default", ",", "expected_type", "=", "None", ",", "*", "*", "kwargs", ")", ":", "try", ":", "try", ":", "# Underscore-style is the recommended configuration style", "opt...
Gets the value of the section/option using method. Returns default if value is not found. Raises an exception if the default value is not None and doesn't match the expected_type.
[ "Gets", "the", "value", "of", "the", "section", "/", "option", "using", "method", "." ]
python
train
frawau/aiolifx
aiolifx/aiolifx.py
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L490-L496
def resp_set_group(self, resp, group=None): """Default callback for get_group/set_group """ if group: self.group=group elif resp: self.group=resp.label.decode().replace("\x00", "")
[ "def", "resp_set_group", "(", "self", ",", "resp", ",", "group", "=", "None", ")", ":", "if", "group", ":", "self", ".", "group", "=", "group", "elif", "resp", ":", "self", ".", "group", "=", "resp", ".", "label", ".", "decode", "(", ")", ".", "r...
Default callback for get_group/set_group
[ "Default", "callback", "for", "get_group", "/", "set_group" ]
python
train
edx/edx-enterprise
enterprise/api/v1/serializers.py
https://github.com/edx/edx-enterprise/blob/aea91379ab0a87cd3bc798961fce28b60ee49a80/enterprise/api/v1/serializers.py#L658-L675
def validate_user_email(self, value): """ Validates the user_email, if given, to see if an existing EnterpriseCustomerUser exists for it. If it does not, it does not fail validation, unlike for the other field validation methods above. """ enterprise_customer = self.context.get('enterprise_customer') try: user = User.objects.get(email=value) return models.EnterpriseCustomerUser.objects.get( user_id=user.id, enterprise_customer=enterprise_customer ) except (models.EnterpriseCustomerUser.DoesNotExist, User.DoesNotExist): pass return value
[ "def", "validate_user_email", "(", "self", ",", "value", ")", ":", "enterprise_customer", "=", "self", ".", "context", ".", "get", "(", "'enterprise_customer'", ")", "try", ":", "user", "=", "User", ".", "objects", ".", "get", "(", "email", "=", "value", ...
Validates the user_email, if given, to see if an existing EnterpriseCustomerUser exists for it. If it does not, it does not fail validation, unlike for the other field validation methods above.
[ "Validates", "the", "user_email", "if", "given", "to", "see", "if", "an", "existing", "EnterpriseCustomerUser", "exists", "for", "it", "." ]
python
valid
saltstack/salt
salt/daemons/masterapi.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/daemons/masterapi.py#L678-L696
def _mine_delete(self, load): ''' Allow the minion to delete a specific function from its own mine ''' if 'id' not in load or 'fun' not in load: return False if self.opts.get('minion_data_cache', False) or self.opts.get('enforce_mine_cache', False): cbank = 'minions/{0}'.format(load['id']) ckey = 'mine' try: data = self.cache.fetch(cbank, ckey) if not isinstance(data, dict): return False if load['fun'] in data: del data[load['fun']] self.cache.store(cbank, ckey, data) except OSError: return False return True
[ "def", "_mine_delete", "(", "self", ",", "load", ")", ":", "if", "'id'", "not", "in", "load", "or", "'fun'", "not", "in", "load", ":", "return", "False", "if", "self", ".", "opts", ".", "get", "(", "'minion_data_cache'", ",", "False", ")", "or", "sel...
Allow the minion to delete a specific function from its own mine
[ "Allow", "the", "minion", "to", "delete", "a", "specific", "function", "from", "its", "own", "mine" ]
python
train
raiden-network/raiden-contracts
raiden_contracts/deploy/contract_deployer.py
https://github.com/raiden-network/raiden-contracts/blob/a7e72a9477f2204b03f3706360ea8d9c0a8e7063/raiden_contracts/deploy/contract_deployer.py#L338-L386
def deploy_service_contracts( self, token_address: str, user_deposit_whole_balance_limit: int, ): """Deploy 3rd party service contracts""" chain_id = int(self.web3.version.network) deployed_contracts: DeployedContracts = { 'contracts_version': self.contract_version_string(), 'chain_id': chain_id, 'contracts': {}, } self._deploy_and_remember(CONTRACT_SERVICE_REGISTRY, [token_address], deployed_contracts) user_deposit = self._deploy_and_remember( contract_name=CONTRACT_USER_DEPOSIT, arguments=[token_address, user_deposit_whole_balance_limit], deployed_contracts=deployed_contracts, ) monitoring_service_constructor_args = [ token_address, deployed_contracts['contracts'][CONTRACT_SERVICE_REGISTRY]['address'], deployed_contracts['contracts'][CONTRACT_USER_DEPOSIT]['address'], ] msc = self._deploy_and_remember( contract_name=CONTRACT_MONITORING_SERVICE, arguments=monitoring_service_constructor_args, deployed_contracts=deployed_contracts, ) one_to_n = self._deploy_and_remember( contract_name=CONTRACT_ONE_TO_N, arguments=[user_deposit.address, chain_id], deployed_contracts=deployed_contracts, ) # Tell the UserDeposit instance about other contracts. LOG.debug( 'Calling UserDeposit.init() with ' f'msc_address={msc.address} ' f'one_to_n_address={one_to_n.address}', ) self.transact(user_deposit.functions.init( _msc_address=msc.address, _one_to_n_address=one_to_n.address, )) return deployed_contracts
[ "def", "deploy_service_contracts", "(", "self", ",", "token_address", ":", "str", ",", "user_deposit_whole_balance_limit", ":", "int", ",", ")", ":", "chain_id", "=", "int", "(", "self", ".", "web3", ".", "version", ".", "network", ")", "deployed_contracts", "...
Deploy 3rd party service contracts
[ "Deploy", "3rd", "party", "service", "contracts" ]
python
train
costastf/locationsharinglib
locationsharinglib/locationsharinglib.py
https://github.com/costastf/locationsharinglib/blob/dcd74b0cdb59b951345df84987238763e50ef282/locationsharinglib/locationsharinglib.py#L126-L137
def get_shared_people(self): """Retrieves all people that share their location with this account""" people = [] output = self._get_data() self._logger.debug(output) shared_entries = output[0] or [] for info in shared_entries: try: people.append(Person(info)) except InvalidData: self._logger.debug('Missing location or other info, dropping person with info: %s', info) return people
[ "def", "get_shared_people", "(", "self", ")", ":", "people", "=", "[", "]", "output", "=", "self", ".", "_get_data", "(", ")", "self", ".", "_logger", ".", "debug", "(", "output", ")", "shared_entries", "=", "output", "[", "0", "]", "or", "[", "]", ...
Retrieves all people that share their location with this account
[ "Retrieves", "all", "people", "that", "share", "their", "location", "with", "this", "account" ]
python
train
kdeldycke/maildir-deduplicate
maildir_deduplicate/deduplicate.py
https://github.com/kdeldycke/maildir-deduplicate/blob/f1c6ff25b80c6c1a4dc2dc7a65b34d808b0b7733/maildir_deduplicate/deduplicate.py#L382-L403
def delete_biggest(self): """ Delete all the biggest duplicates. Keeps all mail of the duplicate set but those sharing the biggest size. """ logger.info( "Deleting all mails sharing the biggest size of {} bytes..." "".format(self.biggest_size)) # Select candidates for deletion. candidates = [ mail for mail in self.pool if mail.size == self.biggest_size] if len(candidates) == self.size: logger.warning( "Skip deletion: all {} mails share the same size." "".format(self.size)) return logger.info( "{} candidates found for deletion.".format(len(candidates))) for mail in candidates: self.delete(mail)
[ "def", "delete_biggest", "(", "self", ")", ":", "logger", ".", "info", "(", "\"Deleting all mails sharing the biggest size of {} bytes...\"", "\"\"", ".", "format", "(", "self", ".", "biggest_size", ")", ")", "# Select candidates for deletion.", "candidates", "=", "[", ...
Delete all the biggest duplicates. Keeps all mail of the duplicate set but those sharing the biggest size.
[ "Delete", "all", "the", "biggest", "duplicates", "." ]
python
train
pypa/pipenv
pipenv/patched/notpip/_internal/wheel.py
https://github.com/pypa/pipenv/blob/cae8d76c210b9777e90aab76e9c4b0e53bb19cde/pipenv/patched/notpip/_internal/wheel.py#L940-L971
def _build_one_legacy(self, req, tempd, python_tag=None): """Build one InstallRequirement using the "legacy" build process. Returns path to wheel if successfully built. Otherwise, returns None. """ base_args = self._base_setup_args(req) spin_message = 'Building wheel for %s (setup.py)' % (req.name,) with open_spinner(spin_message) as spinner: logger.debug('Destination directory: %s', tempd) wheel_args = base_args + ['bdist_wheel', '-d', tempd] \ + self.build_options if python_tag is not None: wheel_args += ["--python-tag", python_tag] try: output = call_subprocess(wheel_args, cwd=req.setup_py_dir, show_stdout=False, spinner=spinner) except Exception: spinner.finish("error") logger.error('Failed building wheel for %s', req.name) return None names = os.listdir(tempd) wheel_path = get_legacy_build_wheel_path( names=names, temp_dir=tempd, req=req, command_args=wheel_args, command_output=output, ) return wheel_path
[ "def", "_build_one_legacy", "(", "self", ",", "req", ",", "tempd", ",", "python_tag", "=", "None", ")", ":", "base_args", "=", "self", ".", "_base_setup_args", "(", "req", ")", "spin_message", "=", "'Building wheel for %s (setup.py)'", "%", "(", "req", ".", ...
Build one InstallRequirement using the "legacy" build process. Returns path to wheel if successfully built. Otherwise, returns None.
[ "Build", "one", "InstallRequirement", "using", "the", "legacy", "build", "process", "." ]
python
train
Yelp/kafka-utils
kafka_utils/util/ssh.py
https://github.com/Yelp/kafka-utils/blob/cdb4d64308f3079ee0873250bf7b34d0d94eca50/kafka_utils/util/ssh.py#L95-L163
def ssh(host, forward_agent=False, sudoable=False, max_attempts=1, max_timeout=5, ssh_password=None): """Manages a SSH connection to the desired host. Will leverage your ssh config at ~/.ssh/config if available :param host: the server to connect to :type host: str :param forward_agent: forward the local agents :type forward_agent: bool :param sudoable: allow sudo commands :type sudoable: bool :param max_attempts: the maximum attempts to connect to the desired host :type max_attempts: int :param max_timeout: the maximum timeout in seconds to sleep between attempts :type max_timeout: int :param ssh_password: SSH password to use if needed :type ssh_password: str :returns a SSH connection to the desired host :rtype: Connection :raises MaxConnectionAttemptsError: Exceeded the maximum attempts to establish the SSH connection. """ with closing(SSHClient()) as client: client.set_missing_host_key_policy(AutoAddPolicy()) cfg = { "hostname": host, "timeout": max_timeout, } if ssh_password: cfg['password'] = ssh_password ssh_config = SSHConfig() user_config_file = os.path.expanduser("~/.ssh/config") if os.path.exists(user_config_file): with open(user_config_file) as f: ssh_config.parse(f) host_config = ssh_config.lookup(host) if "user" in host_config: cfg["username"] = host_config["user"] if "proxycommand" in host_config: cfg["sock"] = ProxyCommand(host_config["proxycommand"]) if "identityfile" in host_config: cfg['key_filename'] = host_config['identityfile'] if "port" in host_config: cfg["port"] = int(host_config["port"]) attempts = 0 while attempts < max_attempts: try: attempts += 1 client.connect(**cfg) break except socket.error as e: if attempts < max_attempts: print("SSH to host {0} failed, retrying...".format(host)) time.sleep(max_timeout) else: print("SSH Exception: {0}".format(e)) else: raise MaxConnectionAttemptsError( "Exceeded max attempts to connect to host {0} after {1} retries".format(host, max_attempts) ) yield Connection(client, forward_agent, sudoable)
[ "def", "ssh", "(", "host", ",", "forward_agent", "=", "False", ",", "sudoable", "=", "False", ",", "max_attempts", "=", "1", ",", "max_timeout", "=", "5", ",", "ssh_password", "=", "None", ")", ":", "with", "closing", "(", "SSHClient", "(", ")", ")", ...
Manages a SSH connection to the desired host. Will leverage your ssh config at ~/.ssh/config if available :param host: the server to connect to :type host: str :param forward_agent: forward the local agents :type forward_agent: bool :param sudoable: allow sudo commands :type sudoable: bool :param max_attempts: the maximum attempts to connect to the desired host :type max_attempts: int :param max_timeout: the maximum timeout in seconds to sleep between attempts :type max_timeout: int :param ssh_password: SSH password to use if needed :type ssh_password: str :returns a SSH connection to the desired host :rtype: Connection :raises MaxConnectionAttemptsError: Exceeded the maximum attempts to establish the SSH connection.
[ "Manages", "a", "SSH", "connection", "to", "the", "desired", "host", ".", "Will", "leverage", "your", "ssh", "config", "at", "~", "/", ".", "ssh", "/", "config", "if", "available" ]
python
train
InfoAgeTech/django-core
django_core/utils/loading.py
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/utils/loading.py#L82-L104
def get_class_from_settings_from_apps(settings_key): """Try and get a class from a settings path by lookin in installed apps. """ cls_path = getattr(settings, settings_key, None) if not cls_path: raise NotImplementedError() try: app_label = cls_path.split('.')[-2] model_name = cls_path.split('.')[-1] except ValueError: raise ImproperlyConfigured("{0} must be of the form " "'app_label.model_name'".format( settings_key)) app = apps.get_app_config(app_label).models_module if not app: raise ImproperlyConfigured("{0} setting refers to an app that has not " "been installed".format(settings_key)) return getattr(app, model_name)
[ "def", "get_class_from_settings_from_apps", "(", "settings_key", ")", ":", "cls_path", "=", "getattr", "(", "settings", ",", "settings_key", ",", "None", ")", "if", "not", "cls_path", ":", "raise", "NotImplementedError", "(", ")", "try", ":", "app_label", "=", ...
Try and get a class from a settings path by lookin in installed apps.
[ "Try", "and", "get", "a", "class", "from", "a", "settings", "path", "by", "lookin", "in", "installed", "apps", "." ]
python
train
rbuffat/pyepw
pyepw/epw.py
https://github.com/rbuffat/pyepw/blob/373d4d3c8386c8d35789f086ac5f6018c2711745/pyepw/epw.py#L1074-L1095
def db996(self, value=None): """ Corresponds to IDD Field `db996` Dry-bulb temperature corresponding to 99.6% annual cumulative frequency of occurrence (cold conditions) Args: value (float): value for IDD Field `db996` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} need to be of type float ' 'for field `db996`'.format(value)) self._db996 = value
[ "def", "db996", "(", "self", ",", "value", "=", "None", ")", ":", "if", "value", "is", "not", "None", ":", "try", ":", "value", "=", "float", "(", "value", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'value {} need to be of type float '"...
Corresponds to IDD Field `db996` Dry-bulb temperature corresponding to 99.6% annual cumulative frequency of occurrence (cold conditions) Args: value (float): value for IDD Field `db996` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
[ "Corresponds", "to", "IDD", "Field", "db996", "Dry", "-", "bulb", "temperature", "corresponding", "to", "99", ".", "6%", "annual", "cumulative", "frequency", "of", "occurrence", "(", "cold", "conditions", ")" ]
python
train
JasonKessler/scattertext
scattertext/TermDocMatrix.py
https://github.com/JasonKessler/scattertext/blob/cacf1f687d218ee8cae3fc05cc901db824bb1b81/scattertext/TermDocMatrix.py#L391-L414
def get_logistic_regression_coefs_l2(self, category, clf=RidgeClassifierCV()): ''' Computes l2-penalized logistic regression score. Parameters ---------- category : str category name to score category : str category name to score Returns ------- (coefficient array, accuracy, majority class baseline accuracy) ''' try: from sklearn.cross_validation import cross_val_predict except: from sklearn.model_selection import cross_val_predict y = self._get_mask_from_category(category) X = TfidfTransformer().fit_transform(self._X) clf.fit(X, y) y_hat = cross_val_predict(clf, X, y) acc, baseline = self._get_accuracy_and_baseline_accuracy(y, y_hat) return clf.coef_[0], acc, baseline
[ "def", "get_logistic_regression_coefs_l2", "(", "self", ",", "category", ",", "clf", "=", "RidgeClassifierCV", "(", ")", ")", ":", "try", ":", "from", "sklearn", ".", "cross_validation", "import", "cross_val_predict", "except", ":", "from", "sklearn", ".", "mode...
Computes l2-penalized logistic regression score. Parameters ---------- category : str category name to score category : str category name to score Returns ------- (coefficient array, accuracy, majority class baseline accuracy)
[ "Computes", "l2", "-", "penalized", "logistic", "regression", "score", ".", "Parameters", "----------", "category", ":", "str", "category", "name", "to", "score" ]
python
train
netom/satispy
satispy/io/dimacs_cnf.py
https://github.com/netom/satispy/blob/0201a7bffd9070441b9e82187348d61c53922b6b/satispy/io/dimacs_cnf.py#L18-L51
def tostring(self, cnf): """Convert Cnf object ot Dimacs cnf string cnf: Cnf object In the converted Cnf there will be only numbers for variable names. The conversion guarantees that the variables will be numbered alphabetically. """ self.varname_dict = {} self.varobj_dict = {} varis = set() for d in cnf.dis: for v in d: varis.add(v.name) ret = "p cnf %d %d" % (len(varis), len(cnf.dis)) varis = dict(list(zip(sorted(list(varis)),list(map(str,list(range(1,len(varis)+1))))))) for v in varis: vo = Variable(v) self.varname_dict[vo] = varis[v] self.varobj_dict[varis[v]] = vo for d in cnf.dis: ret += "\n" vnamelist = [] for v in d: vnamelist.append(("-" if v.inverted else "") + varis[v.name]) ret += " ".join(vnamelist) + " 0" return ret
[ "def", "tostring", "(", "self", ",", "cnf", ")", ":", "self", ".", "varname_dict", "=", "{", "}", "self", ".", "varobj_dict", "=", "{", "}", "varis", "=", "set", "(", ")", "for", "d", "in", "cnf", ".", "dis", ":", "for", "v", "in", "d", ":", ...
Convert Cnf object ot Dimacs cnf string cnf: Cnf object In the converted Cnf there will be only numbers for variable names. The conversion guarantees that the variables will be numbered alphabetically.
[ "Convert", "Cnf", "object", "ot", "Dimacs", "cnf", "string", "cnf", ":", "Cnf", "object", "In", "the", "converted", "Cnf", "there", "will", "be", "only", "numbers", "for", "variable", "names", ".", "The", "conversion", "guarantees", "that", "the", "variables...
python
test
softlayer/softlayer-python
SoftLayer/CLI/user/detail.py
https://github.com/softlayer/softlayer-python/blob/9f181be08cc3668353b05a6de0cb324f52cff6fa/SoftLayer/CLI/user/detail.py#L138-L143
def print_logins(logins): """Prints out the login history for a user""" table = formatting.Table(['Date', 'IP Address', 'Successufl Login?']) for login in logins: table.add_row([login.get('createDate'), login.get('ipAddress'), login.get('successFlag')]) return table
[ "def", "print_logins", "(", "logins", ")", ":", "table", "=", "formatting", ".", "Table", "(", "[", "'Date'", ",", "'IP Address'", ",", "'Successufl Login?'", "]", ")", "for", "login", "in", "logins", ":", "table", ".", "add_row", "(", "[", "login", ".",...
Prints out the login history for a user
[ "Prints", "out", "the", "login", "history", "for", "a", "user" ]
python
train
uber/rides-python-sdk
uber_rides/client.py
https://github.com/uber/rides-python-sdk/blob/76ecd75ab5235d792ec1010e36eca679ba285127/uber_rides/client.py#L863-L876
def validiate_webhook_signature(self, webhook, signature): """Validates a webhook signature from a webhook body + client secret Parameters webhook (string) The request body of the webhook. signature (string) The webhook signature specified in X-Uber-Signature header. """ digester = hmac.new(self.session.oauth2credential.client_secret, webhook, hashlib.sha256 ) return (signature == digester.hexdigest())
[ "def", "validiate_webhook_signature", "(", "self", ",", "webhook", ",", "signature", ")", ":", "digester", "=", "hmac", ".", "new", "(", "self", ".", "session", ".", "oauth2credential", ".", "client_secret", ",", "webhook", ",", "hashlib", ".", "sha256", ")"...
Validates a webhook signature from a webhook body + client secret Parameters webhook (string) The request body of the webhook. signature (string) The webhook signature specified in X-Uber-Signature header.
[ "Validates", "a", "webhook", "signature", "from", "a", "webhook", "body", "+", "client", "secret" ]
python
train
atlassian-api/atlassian-python-api
atlassian/confluence.py
https://github.com/atlassian-api/atlassian-python-api/blob/540d269905c3e7547b666fe30c647b2d512cf358/atlassian/confluence.py#L304-L317
def get_all_spaces(self, start=0, limit=500): """ Get all spaces with provided limit :param start: OPTIONAL: The start point of the collection to return. Default: None (0). :param limit: OPTIONAL: The limit of the number of pages to return, this may be restricted by fixed system limits. Default: 500 """ url = 'rest/api/space' params = {} if limit: params['limit'] = limit if start: params['start'] = start return (self.get(url, params=params) or {}).get('results')
[ "def", "get_all_spaces", "(", "self", ",", "start", "=", "0", ",", "limit", "=", "500", ")", ":", "url", "=", "'rest/api/space'", "params", "=", "{", "}", "if", "limit", ":", "params", "[", "'limit'", "]", "=", "limit", "if", "start", ":", "params", ...
Get all spaces with provided limit :param start: OPTIONAL: The start point of the collection to return. Default: None (0). :param limit: OPTIONAL: The limit of the number of pages to return, this may be restricted by fixed system limits. Default: 500
[ "Get", "all", "spaces", "with", "provided", "limit", ":", "param", "start", ":", "OPTIONAL", ":", "The", "start", "point", "of", "the", "collection", "to", "return", ".", "Default", ":", "None", "(", "0", ")", ".", ":", "param", "limit", ":", "OPTIONAL...
python
train
peshay/tpm
tpm.py
https://github.com/peshay/tpm/blob/8e64a4d8b89d54bdd2c92d965463a7508aa3d0bc/tpm.py#L558-L564
def create_group(self, data): """Create a Group.""" # http://teampasswordmanager.com/docs/api-groups/#create_group log.info('Create group with %s' % data) NewID = self.post('groups.json', data).get('id') log.info('Group has been created with ID %s' % NewID) return NewID
[ "def", "create_group", "(", "self", ",", "data", ")", ":", "# http://teampasswordmanager.com/docs/api-groups/#create_group", "log", ".", "info", "(", "'Create group with %s'", "%", "data", ")", "NewID", "=", "self", ".", "post", "(", "'groups.json'", ",", "data", ...
Create a Group.
[ "Create", "a", "Group", "." ]
python
train
ARMmbed/yotta
yotta/lib/access.py
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access.py#L332-L354
def _satisfyVersionByInstallingVersion(name, version_required, working_directory, version, type='module', inherit_shrinkwrap=None): ''' installs and returns a Component/Target for the specified version requirement into 'working_directory' using the provided remote version object. This function is not normally called via `satisfyVersionByInstalling', which looks up a suitable remote version object. ''' assert(version) logger.info('download %s', version) version.unpackInto(working_directory) r = _clsForType(type)(working_directory, inherit_shrinkwrap = inherit_shrinkwrap) if not r: raise Exception( 'Dependency "%s":"%s" is not a valid %s.' % (name, version_required, type) ) if name != r.getName(): raise Exception('%s %s (specification %s) has incorrect name %s' % ( type, name, version_required, r.getName() )) # error code deliberately ignored here for now, it isn't clear what the # behaviour should be (abort? remove the unpacked state then abort? # continue?) r.runScript('postInstall') return r
[ "def", "_satisfyVersionByInstallingVersion", "(", "name", ",", "version_required", ",", "working_directory", ",", "version", ",", "type", "=", "'module'", ",", "inherit_shrinkwrap", "=", "None", ")", ":", "assert", "(", "version", ")", "logger", ".", "info", "("...
installs and returns a Component/Target for the specified version requirement into 'working_directory' using the provided remote version object. This function is not normally called via `satisfyVersionByInstalling', which looks up a suitable remote version object.
[ "installs", "and", "returns", "a", "Component", "/", "Target", "for", "the", "specified", "version", "requirement", "into", "working_directory", "using", "the", "provided", "remote", "version", "object", ".", "This", "function", "is", "not", "normally", "called", ...
python
valid
NetEaseGame/ATX
atx/patch.py
https://github.com/NetEaseGame/ATX/blob/f4415c57b45cb0730e08899cbc92a2af1c047ffb/atx/patch.py#L70-L80
def go(fn): ''' Decorator ''' def decorator(*args, **kwargs): log.info('begin run func(%s) in background', fn.__name__) t = threading.Thread(target=fn, args=args, kwargs=kwargs) t.setDaemon(True) t.start() return t return decorator
[ "def", "go", "(", "fn", ")", ":", "def", "decorator", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "log", ".", "info", "(", "'begin run func(%s) in background'", ",", "fn", ".", "__name__", ")", "t", "=", "threading", ".", "Thread", "(", "tar...
Decorator
[ "Decorator" ]
python
train
SpriteLink/NIPAP
pynipap/pynipap.py
https://github.com/SpriteLink/NIPAP/blob/f96069f11ab952d80b13cab06e0528f2d24b3de9/pynipap/pynipap.py#L1116-L1141
def list(cls, spec=None): """ List prefixes. Maps to the function :py:func:`nipap.backend.Nipap.list_prefix` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values. """ if spec is None: spec = {} xmlrpc = XMLRPCConnection() try: pref_list = xmlrpc.connection.list_prefix( { 'prefix': spec, 'auth': AuthOptions().options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) res = list() for pref in pref_list: p = Prefix.from_dict(pref) res.append(p) return res
[ "def", "list", "(", "cls", ",", "spec", "=", "None", ")", ":", "if", "spec", "is", "None", ":", "spec", "=", "{", "}", "xmlrpc", "=", "XMLRPCConnection", "(", ")", "try", ":", "pref_list", "=", "xmlrpc", ".", "connection", ".", "list_prefix", "(", ...
List prefixes. Maps to the function :py:func:`nipap.backend.Nipap.list_prefix` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values.
[ "List", "prefixes", "." ]
python
train
saltstack/salt
salt/transport/tcp.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/transport/tcp.py#L1007-L1033
def _connect(self): ''' Try to connect for the rest of time! ''' while True: if self._closing: break try: kwargs = {} if self.source_ip or self.source_port: if tornado.version_info >= (4, 5): ### source_ip and source_port are supported only in Tornado >= 4.5 # See http://www.tornadoweb.org/en/stable/releases/v4.5.0.html # Otherwise will just ignore these args kwargs = {'source_ip': self.source_ip, 'source_port': self.source_port} else: log.warning('If you need a certain source IP/port, consider upgrading Tornado >= 4.5') with salt.utils.asynchronous.current_ioloop(self.io_loop): self._stream = yield self._tcp_client.connect(self.host, self.port, ssl_options=self.opts.get('ssl'), **kwargs) self._connecting_future.set_result(True) break except Exception as e: yield tornado.gen.sleep(1)
[ "def", "_connect", "(", "self", ")", ":", "while", "True", ":", "if", "self", ".", "_closing", ":", "break", "try", ":", "kwargs", "=", "{", "}", "if", "self", ".", "source_ip", "or", "self", ".", "source_port", ":", "if", "tornado", ".", "version_in...
Try to connect for the rest of time!
[ "Try", "to", "connect", "for", "the", "rest", "of", "time!" ]
python
train
apache/spark
python/pyspark/ml/wrapper.py
https://github.com/apache/spark/blob/618d6bff71073c8c93501ab7392c3cc579730f0b/python/pyspark/ml/wrapper.py#L44-L49
def _create_from_java_class(cls, java_class, *args): """ Construct this object from given Java classname and arguments """ java_obj = JavaWrapper._new_java_obj(java_class, *args) return cls(java_obj)
[ "def", "_create_from_java_class", "(", "cls", ",", "java_class", ",", "*", "args", ")", ":", "java_obj", "=", "JavaWrapper", ".", "_new_java_obj", "(", "java_class", ",", "*", "args", ")", "return", "cls", "(", "java_obj", ")" ]
Construct this object from given Java classname and arguments
[ "Construct", "this", "object", "from", "given", "Java", "classname", "and", "arguments" ]
python
train
asweigart/pysimplevalidate
src/pysimplevalidate/__init__.py
https://github.com/asweigart/pysimplevalidate/blob/3ca27228abb7355d14bbf8abc225c63366379e44/src/pysimplevalidate/__init__.py#L526-L608
def validateChoice(value, choices, blank=False, strip=None, allowlistRegexes=None, blocklistRegexes=None, numbered=False, lettered=False, caseSensitive=False, excMsg=None): """Raises ValidationException if value is not one of the values in choices. Returns the selected choice. Returns the value in choices that was selected, so it can be used inline in an expression: print('You chose ' + validateChoice(your_choice, ['cat', 'dog'])) Note that value itself is not returned: validateChoice('CAT', ['cat', 'dog']) will return 'cat', not 'CAT'. If lettered is True, lower or uppercase letters will be accepted regardless of what caseSensitive is set to. The caseSensitive argument only matters for matching with the text of the strings in choices. * value (str): The value being validated. * blank (bool): If True, a blank string will be accepted. Defaults to False. * strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped. * allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation, even if they aren't numbers. * blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation. * numbered (bool): If True, this function will also accept a string of the choice's number, i.e. '1' or '2'. * lettered (bool): If True, this function will also accept a string of the choice's letter, i.e. 'A' or 'B' or 'a' or 'b'. * caseSensitive (bool): If True, then the exact case of the option must be entered. * excMsg (str): A custom message to use in the raised ValidationException. Returns the choice selected as it appeared in choices. That is, if 'cat' was a choice and the user entered 'CAT' while caseSensitive is False, this function will return 'cat'. 
>>> import pysimplevalidate as pysv >>> pysv.validateChoice('dog', ['dog', 'cat', 'moose']) 'dog' >>> pysv.validateChoice('DOG', ['dog', 'cat', 'moose']) 'dog' >>> pysv.validateChoice('2', ['dog', 'cat', 'moose'], numbered=True) 'cat' >>> pysv.validateChoice('a', ['dog', 'cat', 'moose'], lettered=True) 'dog' >>> pysv.validateChoice('C', ['dog', 'cat', 'moose'], lettered=True) 'moose' >>> pysv.validateChoice('dog', ['dog', 'cat', 'moose'], lettered=True) 'dog' >>> pysv.validateChoice('spider', ['dog', 'cat', 'moose']) Traceback (most recent call last): ... pysimplevalidate.ValidationException: 'spider' is not a valid choice. """ # Validate parameters. _validateParamsFor_validateChoice(choices=choices, blank=blank, strip=strip, allowlistRegexes=None, blocklistRegexes=blocklistRegexes, numbered=numbered, lettered=lettered, caseSensitive=caseSensitive) if '' in choices: # blank needs to be set to True here, otherwise '' won't be accepted as a choice. blank = True returnNow, value = _prevalidationCheck(value, blank, strip, allowlistRegexes, blocklistRegexes, excMsg) if returnNow: return value # Validate against choices. if value in choices: return value if numbered and value.isdigit() and 0 < int(value) <= len(choices): # value must be 1 to len(choices) # Numbered options begins at 1, not 0. return choices[int(value) - 1] # -1 because the numbers are 1 to len(choices) but the index are 0 to len(choices) - 1 if lettered and len(value) == 1 and value.isalpha() and 0 < ord(value.upper()) - 64 <= len(choices): # Lettered options are always case-insensitive. return choices[ord(value.upper()) - 65] if not caseSensitive and value.upper() in [choice.upper() for choice in choices]: # Return the original item in choices that value has a case-insensitive match with. return choices[[choice.upper() for choice in choices].index(value.upper())] _raiseValidationException(_('%r is not a valid choice.') % (_errstr(value)), excMsg)
[ "def", "validateChoice", "(", "value", ",", "choices", ",", "blank", "=", "False", ",", "strip", "=", "None", ",", "allowlistRegexes", "=", "None", ",", "blocklistRegexes", "=", "None", ",", "numbered", "=", "False", ",", "lettered", "=", "False", ",", "...
Raises ValidationException if value is not one of the values in choices. Returns the selected choice. Returns the value in choices that was selected, so it can be used inline in an expression: print('You chose ' + validateChoice(your_choice, ['cat', 'dog'])) Note that value itself is not returned: validateChoice('CAT', ['cat', 'dog']) will return 'cat', not 'CAT'. If lettered is True, lower or uppercase letters will be accepted regardless of what caseSensitive is set to. The caseSensitive argument only matters for matching with the text of the strings in choices. * value (str): The value being validated. * blank (bool): If True, a blank string will be accepted. Defaults to False. * strip (bool, str, None): If None, whitespace is stripped from value. If a str, the characters in it are stripped from value. If False, nothing is stripped. * allowlistRegexes (Sequence, None): A sequence of regex str that will explicitly pass validation, even if they aren't numbers. * blocklistRegexes (Sequence, None): A sequence of regex str or (regex_str, response_str) tuples that, if matched, will explicitly fail validation. * numbered (bool): If True, this function will also accept a string of the choice's number, i.e. '1' or '2'. * lettered (bool): If True, this function will also accept a string of the choice's letter, i.e. 'A' or 'B' or 'a' or 'b'. * caseSensitive (bool): If True, then the exact case of the option must be entered. * excMsg (str): A custom message to use in the raised ValidationException. Returns the choice selected as it appeared in choices. That is, if 'cat' was a choice and the user entered 'CAT' while caseSensitive is False, this function will return 'cat'. 
>>> import pysimplevalidate as pysv >>> pysv.validateChoice('dog', ['dog', 'cat', 'moose']) 'dog' >>> pysv.validateChoice('DOG', ['dog', 'cat', 'moose']) 'dog' >>> pysv.validateChoice('2', ['dog', 'cat', 'moose'], numbered=True) 'cat' >>> pysv.validateChoice('a', ['dog', 'cat', 'moose'], lettered=True) 'dog' >>> pysv.validateChoice('C', ['dog', 'cat', 'moose'], lettered=True) 'moose' >>> pysv.validateChoice('dog', ['dog', 'cat', 'moose'], lettered=True) 'dog' >>> pysv.validateChoice('spider', ['dog', 'cat', 'moose']) Traceback (most recent call last): ... pysimplevalidate.ValidationException: 'spider' is not a valid choice.
[ "Raises", "ValidationException", "if", "value", "is", "not", "one", "of", "the", "values", "in", "choices", ".", "Returns", "the", "selected", "choice", "." ]
python
train
wuher/devil
devil/mappers/xmlmapper.py
https://github.com/wuher/devil/blob/a8834d4f88d915a21754c6b96f99d0ad9123ad4d/devil/mappers/xmlmapper.py#L93-L118
def _to_xml(self, xml, data, key=None): """ Recursively convert the data into xml. This function was originally copied from the `Piston project <https://bitbucket.org/jespern/django-piston/>`_ It has been modified since. :param xml: the xml document :type xml: SimplerXMLGenerator :param data: data to be formatted :param key: name of the parent element (for root this is ``None``) """ if isinstance(data, (list, tuple)): for item in data: elemname = self._list_item_element_name(key) xml.startElement(elemname, {}) self._to_xml(xml, item) xml.endElement(elemname) elif isinstance(data, dict): for key, value in data.iteritems(): xml.startElement(key, {}) self._to_xml(xml, value, key) xml.endElement(key) else: xml.characters(smart_unicode(data))
[ "def", "_to_xml", "(", "self", ",", "xml", ",", "data", ",", "key", "=", "None", ")", ":", "if", "isinstance", "(", "data", ",", "(", "list", ",", "tuple", ")", ")", ":", "for", "item", "in", "data", ":", "elemname", "=", "self", ".", "_list_item...
Recursively convert the data into xml. This function was originally copied from the `Piston project <https://bitbucket.org/jespern/django-piston/>`_ It has been modified since. :param xml: the xml document :type xml: SimplerXMLGenerator :param data: data to be formatted :param key: name of the parent element (for root this is ``None``)
[ "Recursively", "convert", "the", "data", "into", "xml", "." ]
python
train
noahgoldman/adbpy
adbpy/socket.py
https://github.com/noahgoldman/adbpy/blob/ecbff8a8f151852b5c36847dc812582a8674a503/adbpy/socket.py#L30-L35
def connect(self): """Connect to the given socket""" if not self.connected: self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.socket.connect(self.address) self.connected = True
[ "def", "connect", "(", "self", ")", ":", "if", "not", "self", ".", "connected", ":", "self", ".", "socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "self", ".", "socket", ".", "connect", "(...
Connect to the given socket
[ "Connect", "to", "the", "given", "socket" ]
python
train
apple/turicreate
src/external/coremltools_wrap/coremltools/coremltools/models/utils.py
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/external/coremltools_wrap/coremltools/coremltools/models/utils.py#L28-L59
def save_spec(spec, filename): """ Save a protobuf model specification to file. Parameters ---------- spec: Model_pb Protobuf representation of the model filename: str File path where the spec gets saved. Examples -------- .. sourcecode:: python >>> coremltools.utils.save_spec(spec, 'HousePricer.mlmodel') See Also -------- load_spec """ name, ext = _os.path.splitext(filename) if not ext: filename = "%s.mlmodel" % filename else: if ext != '.mlmodel': raise Exception("Extension must be .mlmodel (not %s)" % ext) with open(filename, 'wb') as f: s = spec.SerializeToString() f.write(s)
[ "def", "save_spec", "(", "spec", ",", "filename", ")", ":", "name", ",", "ext", "=", "_os", ".", "path", ".", "splitext", "(", "filename", ")", "if", "not", "ext", ":", "filename", "=", "\"%s.mlmodel\"", "%", "filename", "else", ":", "if", "ext", "!=...
Save a protobuf model specification to file. Parameters ---------- spec: Model_pb Protobuf representation of the model filename: str File path where the spec gets saved. Examples -------- .. sourcecode:: python >>> coremltools.utils.save_spec(spec, 'HousePricer.mlmodel') See Also -------- load_spec
[ "Save", "a", "protobuf", "model", "specification", "to", "file", "." ]
python
train
NyashniyVladya/MarkovTextGenerator
MarkovTextGenerator/markov_text_generator.py
https://github.com/NyashniyVladya/MarkovTextGenerator/blob/3d90e02a507939709773ef01c7ff3ec68b2b8d4b/MarkovTextGenerator/markov_text_generator.py#L105-L129
def get_optimal_variant(self, variants, start_words, **kwargs): """ Возвращает оптимальный вариант, из выборки. """ if not start_words: return (choice(variants), {}) _variants = [] _weights = [] for tok in frozenset(variants): if not self.token_is_correct(tok): continue weight = variants.count(tok) for word in start_words: for token in self.ONLY_WORDS.finditer(word.strip().lower()): if token.group() == tok: weight <<= 1 _variants.append(tok) _weights.append(weight) if not _variants: return (choice(variants), {}) return (choices(_variants, weights=_weights, k=1)[0], {})
[ "def", "get_optimal_variant", "(", "self", ",", "variants", ",", "start_words", ",", "*", "*", "kwargs", ")", ":", "if", "not", "start_words", ":", "return", "(", "choice", "(", "variants", ")", ",", "{", "}", ")", "_variants", "=", "[", "]", "_weights...
Возвращает оптимальный вариант, из выборки.
[ "Возвращает", "оптимальный", "вариант", "из", "выборки", "." ]
python
valid
mitsei/dlkit
dlkit/json_/cataloging/sessions.py
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/cataloging/sessions.py#L1368-L1386
def remove_child_catalog(self, catalog_id, child_id): """Removes a child from a catalog. arg: catalog_id (osid.id.Id): the ``Id`` of a catalog arg: child_id (osid.id.Id): the ``Id`` of the new child raise: NotFound - ``catalog_id`` is not a parent of ``child_id`` raise: NullArgument - ``catalog_id`` or ``child_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.BinHierarchyDesignSession.remove_child_bin_template if self._catalog_session is not None: return self._catalog_session.remove_child_catalog(catalog_id=catalog_id, child_id=child_id) return self._hierarchy_session.remove_child(id_=catalog_id, child_id=child_id)
[ "def", "remove_child_catalog", "(", "self", ",", "catalog_id", ",", "child_id", ")", ":", "# Implemented from template for", "# osid.resource.BinHierarchyDesignSession.remove_child_bin_template", "if", "self", ".", "_catalog_session", "is", "not", "None", ":", "return", "se...
Removes a child from a catalog. arg: catalog_id (osid.id.Id): the ``Id`` of a catalog arg: child_id (osid.id.Id): the ``Id`` of the new child raise: NotFound - ``catalog_id`` is not a parent of ``child_id`` raise: NullArgument - ``catalog_id`` or ``child_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.*
[ "Removes", "a", "child", "from", "a", "catalog", "." ]
python
train
PythonCharmers/python-future
src/future/backports/datetime.py
https://github.com/PythonCharmers/python-future/blob/c423752879acc05eebc29b0bb9909327bd5c7308/src/future/backports/datetime.py#L1600-L1603
def strptime(cls, date_string, format): 'string, format -> new datetime parsed from a string (like time.strptime()).' import _strptime return _strptime._strptime_datetime(cls, date_string, format)
[ "def", "strptime", "(", "cls", ",", "date_string", ",", "format", ")", ":", "import", "_strptime", "return", "_strptime", ".", "_strptime_datetime", "(", "cls", ",", "date_string", ",", "format", ")" ]
string, format -> new datetime parsed from a string (like time.strptime()).
[ "string", "format", "-", ">", "new", "datetime", "parsed", "from", "a", "string", "(", "like", "time", ".", "strptime", "()", ")", "." ]
python
train
dwavesystems/dwave-cloud-client
dwave/cloud/cli.py
https://github.com/dwavesystems/dwave-cloud-client/blob/df3221a8385dc0c04d7b4d84f740bf3ad6706230/dwave/cloud/cli.py#L274-L298
def ping(config_file, profile, solver_def, json_output, request_timeout, polling_timeout): """Ping the QPU by submitting a single-qubit problem.""" now = utcnow() info = dict(datetime=now.isoformat(), timestamp=datetime_to_timestamp(now), code=0) def output(fmt, **kwargs): info.update(kwargs) if not json_output: click.echo(fmt.format(**kwargs)) def flush(): if json_output: click.echo(json.dumps(info)) try: _ping(config_file, profile, solver_def, request_timeout, polling_timeout, output) except CLIError as error: output("Error: {error} (code: {code})", error=str(error), code=error.code) sys.exit(error.code) except Exception as error: output("Unhandled error: {error}", error=str(error)) sys.exit(127) finally: flush()
[ "def", "ping", "(", "config_file", ",", "profile", ",", "solver_def", ",", "json_output", ",", "request_timeout", ",", "polling_timeout", ")", ":", "now", "=", "utcnow", "(", ")", "info", "=", "dict", "(", "datetime", "=", "now", ".", "isoformat", "(", "...
Ping the QPU by submitting a single-qubit problem.
[ "Ping", "the", "QPU", "by", "submitting", "a", "single", "-", "qubit", "problem", "." ]
python
train
apache/incubator-heron
heron/instance/src/python/utils/topology/topology_context_impl.py
https://github.com/apache/incubator-heron/blob/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac/heron/instance/src/python/utils/topology/topology_context_impl.py#L110-L116
def get_component_tasks(self, component_id): """Returns the task ids allocated for the given component id""" ret = [] for task_id, comp_id in self.task_to_component_map.items(): if comp_id == component_id: ret.append(task_id) return ret
[ "def", "get_component_tasks", "(", "self", ",", "component_id", ")", ":", "ret", "=", "[", "]", "for", "task_id", ",", "comp_id", "in", "self", ".", "task_to_component_map", ".", "items", "(", ")", ":", "if", "comp_id", "==", "component_id", ":", "ret", ...
Returns the task ids allocated for the given component id
[ "Returns", "the", "task", "ids", "allocated", "for", "the", "given", "component", "id" ]
python
valid
tanghaibao/goatools
goatools/grouper/wrxlsx.py
https://github.com/tanghaibao/goatools/blob/407682e573a108864a79031f8ca19ee3bf377626/goatools/grouper/wrxlsx.py#L85-L91
def _prt_txt_desc2nts(self, prt, desc2nts, prtfmt=None): """Print grouped and sorted GO IDs.""" if prtfmt is None: prtfmt = self.get_prtfmt("fmta") if self.ver_list is not None: prt.write("# Versions:\n# {VER}\n".format(VER="\n# ".join(self.ver_list))) self.prt_txt_desc2nts(prt, desc2nts, prtfmt)
[ "def", "_prt_txt_desc2nts", "(", "self", ",", "prt", ",", "desc2nts", ",", "prtfmt", "=", "None", ")", ":", "if", "prtfmt", "is", "None", ":", "prtfmt", "=", "self", ".", "get_prtfmt", "(", "\"fmta\"", ")", "if", "self", ".", "ver_list", "is", "not", ...
Print grouped and sorted GO IDs.
[ "Print", "grouped", "and", "sorted", "GO", "IDs", "." ]
python
train
Spirent/py-stcrestclient
stcrestclient/stchttp.py
https://github.com/Spirent/py-stcrestclient/blob/80ee82bddf2fb2808f3da8ff2c80b7d588e165e8/stcrestclient/stchttp.py#L309-L320
def bll_version(self): """Get the BLL version this session is connected to. Return: Version string if session started. None if session not started. """ if not self.started(): return None status, data = self._rest.get_request('objects', 'system1', ['version', 'name']) return data['version']
[ "def", "bll_version", "(", "self", ")", ":", "if", "not", "self", ".", "started", "(", ")", ":", "return", "None", "status", ",", "data", "=", "self", ".", "_rest", ".", "get_request", "(", "'objects'", ",", "'system1'", ",", "[", "'version'", ",", "...
Get the BLL version this session is connected to. Return: Version string if session started. None if session not started.
[ "Get", "the", "BLL", "version", "this", "session", "is", "connected", "to", "." ]
python
train
inveniosoftware/invenio-accounts
invenio_accounts/models.py
https://github.com/inveniosoftware/invenio-accounts/blob/b0d2f0739b00dbefea22ca15d7d374a1b4a63aec/invenio_accounts/models.py#L141-L145
def query_by_expired(cls): """Query to select all expired sessions.""" lifetime = current_app.permanent_session_lifetime expired_moment = datetime.utcnow() - lifetime return cls.query.filter(cls.created < expired_moment)
[ "def", "query_by_expired", "(", "cls", ")", ":", "lifetime", "=", "current_app", ".", "permanent_session_lifetime", "expired_moment", "=", "datetime", ".", "utcnow", "(", ")", "-", "lifetime", "return", "cls", ".", "query", ".", "filter", "(", "cls", ".", "c...
Query to select all expired sessions.
[ "Query", "to", "select", "all", "expired", "sessions", "." ]
python
train
Parsl/parsl
parsl/dataflow/flow_control.py
https://github.com/Parsl/parsl/blob/d7afb3bc37f50dcf224ae78637944172edb35dac/parsl/dataflow/flow_control.py#L123-L129
def notify(self, event_id): """Let the FlowControl system know that there is an event.""" self._event_buffer.extend([event_id]) self._event_count += 1 if self._event_count >= self.threshold: logger.debug("Eventcount >= threshold") self.make_callback(kind="event")
[ "def", "notify", "(", "self", ",", "event_id", ")", ":", "self", ".", "_event_buffer", ".", "extend", "(", "[", "event_id", "]", ")", "self", ".", "_event_count", "+=", "1", "if", "self", ".", "_event_count", ">=", "self", ".", "threshold", ":", "logge...
Let the FlowControl system know that there is an event.
[ "Let", "the", "FlowControl", "system", "know", "that", "there", "is", "an", "event", "." ]
python
valid
line/line-bot-sdk-python
linebot/api.py
https://github.com/line/line-bot-sdk-python/blob/1b38bfc2497ff3e3c75be4b50e0f1b7425a07ce0/linebot/api.py#L451-L466
def unlink_rich_menu_from_user(self, user_id, timeout=None): """Call unlink rich menu from user API. https://developers.line.me/en/docs/messaging-api/reference/#unlink-rich-menu-from-user :param str user_id: ID of the user :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (connect timeout, read timeout) float tuple. Default is self.http_client.timeout :type timeout: float | tuple(float, float) """ self._delete( '/v2/bot/user/{user_id}/richmenu'.format(user_id=user_id), timeout=timeout )
[ "def", "unlink_rich_menu_from_user", "(", "self", ",", "user_id", ",", "timeout", "=", "None", ")", ":", "self", ".", "_delete", "(", "'/v2/bot/user/{user_id}/richmenu'", ".", "format", "(", "user_id", "=", "user_id", ")", ",", "timeout", "=", "timeout", ")" ]
Call unlink rich menu from user API. https://developers.line.me/en/docs/messaging-api/reference/#unlink-rich-menu-from-user :param str user_id: ID of the user :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (connect timeout, read timeout) float tuple. Default is self.http_client.timeout :type timeout: float | tuple(float, float)
[ "Call", "unlink", "rich", "menu", "from", "user", "API", "." ]
python
train